diff --git a/.gitattributes b/.gitattributes
index a6344aac8c09253b3b630fb776ae94478aa0275b..6a7421c0f2984115eba1131cd54efddf75e131ee 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -33,3 +33,9 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
+checkpoint-1000/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-1500/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-2000/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-2500/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-2997/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-500/tokenizer.json filter=lfs diff=lfs merge=lfs -text
diff --git a/checkpoint-1000/config.json b/checkpoint-1000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..49d4bd1e1961ef7daf9af9a7dbe539789d58d949
--- /dev/null
+++ b/checkpoint-1000/config.json
@@ -0,0 +1,35 @@
+{
+ "_name_or_path": "facebook/nllb-200-distilled-600M",
+ "activation_dropout": 0.0,
+ "activation_function": "relu",
+ "architectures": [
+ "M2M100ForConditionalGeneration"
+ ],
+ "attention_dropout": 0.1,
+ "bos_token_id": 0,
+ "d_model": 1024,
+ "decoder_attention_heads": 16,
+ "decoder_ffn_dim": 4096,
+ "decoder_layerdrop": 0,
+ "decoder_layers": 12,
+ "decoder_start_token_id": 2,
+ "dropout": 0.1,
+ "encoder_attention_heads": 16,
+ "encoder_ffn_dim": 4096,
+ "encoder_layerdrop": 0,
+ "encoder_layers": 12,
+ "eos_token_id": 2,
+ "init_std": 0.02,
+ "is_encoder_decoder": true,
+ "max_length": 200,
+ "max_position_embeddings": 1024,
+ "model_type": "m2m_100",
+ "num_hidden_layers": 12,
+ "pad_token_id": 1,
+ "scale_embedding": true,
+ "tokenizer_class": "NllbTokenizer",
+ "torch_dtype": "float32",
+ "transformers_version": "4.43.1",
+ "use_cache": true,
+ "vocab_size": 256206
+}
diff --git a/checkpoint-1000/generation_config.json b/checkpoint-1000/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..680d3e0504023804deeb427766576194a0f17d47
--- /dev/null
+++ b/checkpoint-1000/generation_config.json
@@ -0,0 +1,9 @@
+{
+ "_from_model_config": true,
+ "bos_token_id": 0,
+ "decoder_start_token_id": 2,
+ "eos_token_id": 2,
+ "max_length": 200,
+ "pad_token_id": 1,
+ "transformers_version": "4.43.1"
+}
diff --git a/checkpoint-1000/model.safetensors b/checkpoint-1000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..4835739ea275035b00de6c9d55c80a544ea2d129
--- /dev/null
+++ b/checkpoint-1000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e9ea0af968d16712af35a3774ec4fa89d06e322c814331b646307adcd171a006
+size 2460354912
diff --git a/checkpoint-1000/optimizer.pt b/checkpoint-1000/optimizer.pt
new file mode 100644
index 0000000000000000000000000000000000000000..17411a9ab8c9c6e6052bb6b1bc1db5b21e1321f9
--- /dev/null
+++ b/checkpoint-1000/optimizer.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4a34a98a18304d3f78030c42ae18f25433256e2e40c36c59959291eecd515cba
+size 5125261
diff --git a/checkpoint-1000/rng_state.pth b/checkpoint-1000/rng_state.pth
new file mode 100644
index 0000000000000000000000000000000000000000..2beb7b56823f6f9c468aa00affc0c03cff41e7c0
--- /dev/null
+++ b/checkpoint-1000/rng_state.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:66a0f9b5b3dcfe7c5d3752567edb009b563647451cad20161c60518861b49a0b
+size 14244
diff --git a/checkpoint-1000/scheduler.pt b/checkpoint-1000/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..9821fb45b0a1df60214edcf1899282bb8e5baf49
--- /dev/null
+++ b/checkpoint-1000/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:570e13a31cf6d8596cc77b49b9cd5f0dba53d08fce525ea305a66da574a18d47
+size 1064
diff --git a/checkpoint-1000/sentencepiece.bpe.model b/checkpoint-1000/sentencepiece.bpe.model
new file mode 100644
index 0000000000000000000000000000000000000000..dc2262d3e1d375b235eb71c24119c8e73f85d4ad
--- /dev/null
+++ b/checkpoint-1000/sentencepiece.bpe.model
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:14bb8dfb35c0ffdea7bc01e56cea38b9e3d5efcdcb9c251d6b40538e1aab555a
+size 4852054
diff --git a/checkpoint-1000/special_tokens_map.json b/checkpoint-1000/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..770c6f4e25faf27bbc3878b806f2ecfb88c5169e
--- /dev/null
+++ b/checkpoint-1000/special_tokens_map.json
@@ -0,0 +1,255 @@
+{
+ "additional_special_tokens": [
+ "ace_Arab",
+ "ace_Latn",
+ "acm_Arab",
+ "acq_Arab",
+ "aeb_Arab",
+ "afr_Latn",
+ "ajp_Arab",
+ "aka_Latn",
+ "amh_Ethi",
+ "apc_Arab",
+ "arb_Arab",
+ "ars_Arab",
+ "ary_Arab",
+ "arz_Arab",
+ "asm_Beng",
+ "ast_Latn",
+ "awa_Deva",
+ "ayr_Latn",
+ "azb_Arab",
+ "azj_Latn",
+ "bak_Cyrl",
+ "bam_Latn",
+ "ban_Latn",
+ "bel_Cyrl",
+ "bem_Latn",
+ "ben_Beng",
+ "bho_Deva",
+ "bjn_Arab",
+ "bjn_Latn",
+ "bod_Tibt",
+ "bos_Latn",
+ "bug_Latn",
+ "bul_Cyrl",
+ "cat_Latn",
+ "ceb_Latn",
+ "ces_Latn",
+ "cjk_Latn",
+ "ckb_Arab",
+ "crh_Latn",
+ "cym_Latn",
+ "dan_Latn",
+ "deu_Latn",
+ "dik_Latn",
+ "dyu_Latn",
+ "dzo_Tibt",
+ "ell_Grek",
+ "eng_Latn",
+ "epo_Latn",
+ "est_Latn",
+ "eus_Latn",
+ "ewe_Latn",
+ "fao_Latn",
+ "pes_Arab",
+ "fij_Latn",
+ "fin_Latn",
+ "fon_Latn",
+ "fra_Latn",
+ "fur_Latn",
+ "fuv_Latn",
+ "gla_Latn",
+ "gle_Latn",
+ "glg_Latn",
+ "grn_Latn",
+ "guj_Gujr",
+ "hat_Latn",
+ "hau_Latn",
+ "heb_Hebr",
+ "hin_Deva",
+ "hne_Deva",
+ "hrv_Latn",
+ "hun_Latn",
+ "hye_Armn",
+ "ibo_Latn",
+ "ilo_Latn",
+ "ind_Latn",
+ "isl_Latn",
+ "ita_Latn",
+ "jav_Latn",
+ "jpn_Jpan",
+ "kab_Latn",
+ "kac_Latn",
+ "kam_Latn",
+ "kan_Knda",
+ "kas_Arab",
+ "kas_Deva",
+ "kat_Geor",
+ "knc_Arab",
+ "knc_Latn",
+ "kaz_Cyrl",
+ "kbp_Latn",
+ "kea_Latn",
+ "khm_Khmr",
+ "kik_Latn",
+ "kin_Latn",
+ "kir_Cyrl",
+ "kmb_Latn",
+ "kon_Latn",
+ "kor_Hang",
+ "kmr_Latn",
+ "lao_Laoo",
+ "lvs_Latn",
+ "lij_Latn",
+ "lim_Latn",
+ "lin_Latn",
+ "lit_Latn",
+ "lmo_Latn",
+ "ltg_Latn",
+ "ltz_Latn",
+ "lua_Latn",
+ "lug_Latn",
+ "luo_Latn",
+ "lus_Latn",
+ "mag_Deva",
+ "mai_Deva",
+ "mal_Mlym",
+ "mar_Deva",
+ "min_Latn",
+ "mkd_Cyrl",
+ "plt_Latn",
+ "mlt_Latn",
+ "mni_Beng",
+ "khk_Cyrl",
+ "mos_Latn",
+ "mri_Latn",
+ "zsm_Latn",
+ "mya_Mymr",
+ "nld_Latn",
+ "nno_Latn",
+ "nob_Latn",
+ "npi_Deva",
+ "nso_Latn",
+ "nus_Latn",
+ "nya_Latn",
+ "oci_Latn",
+ "gaz_Latn",
+ "ory_Orya",
+ "pag_Latn",
+ "pan_Guru",
+ "pap_Latn",
+ "pol_Latn",
+ "por_Latn",
+ "prs_Arab",
+ "pbt_Arab",
+ "quy_Latn",
+ "ron_Latn",
+ "run_Latn",
+ "rus_Cyrl",
+ "sag_Latn",
+ "san_Deva",
+ "sat_Beng",
+ "scn_Latn",
+ "shn_Mymr",
+ "sin_Sinh",
+ "slk_Latn",
+ "slv_Latn",
+ "smo_Latn",
+ "sna_Latn",
+ "snd_Arab",
+ "som_Latn",
+ "sot_Latn",
+ "spa_Latn",
+ "als_Latn",
+ "srd_Latn",
+ "srp_Cyrl",
+ "ssw_Latn",
+ "sun_Latn",
+ "swe_Latn",
+ "swh_Latn",
+ "szl_Latn",
+ "tam_Taml",
+ "tat_Cyrl",
+ "tel_Telu",
+ "tgk_Cyrl",
+ "tgl_Latn",
+ "tha_Thai",
+ "tir_Ethi",
+ "taq_Latn",
+ "taq_Tfng",
+ "tpi_Latn",
+ "tsn_Latn",
+ "tso_Latn",
+ "tuk_Latn",
+ "tum_Latn",
+ "tur_Latn",
+ "twi_Latn",
+ "tzm_Tfng",
+ "uig_Arab",
+ "ukr_Cyrl",
+ "umb_Latn",
+ "urd_Arab",
+ "uzn_Latn",
+ "vec_Latn",
+ "vie_Latn",
+ "war_Latn",
+ "wol_Latn",
+ "xho_Latn",
+ "ydd_Hebr",
+ "yor_Latn",
+ "yue_Hant",
+ "zho_Hans",
+ "zho_Hant",
+ "zul_Latn"
+ ],
+ "bos_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "cls_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "mask_token": {
+ "content": "",
+ "lstrip": true,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "sep_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/checkpoint-1000/tokenizer.json b/checkpoint-1000/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..98050e98b98364c06d83b3f41864076220cb8408
--- /dev/null
+++ b/checkpoint-1000/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3b39b25b0763a1dd69dec54081fafcf10770d9f2538a3bd975a0c4be6d60a9c2
+size 17331294
diff --git a/checkpoint-1000/tokenizer_config.json b/checkpoint-1000/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f1424d3657c008568198b44be241646482e7e9f2
--- /dev/null
+++ b/checkpoint-1000/tokenizer_config.json
@@ -0,0 +1,1878 @@
+{
+ "added_tokens_decoder": {
+ "0": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "3": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256001": {
+ "content": "ace_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256002": {
+ "content": "ace_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256003": {
+ "content": "acm_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256004": {
+ "content": "acq_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256005": {
+ "content": "aeb_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256006": {
+ "content": "afr_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256007": {
+ "content": "ajp_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256008": {
+ "content": "aka_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256009": {
+ "content": "amh_Ethi",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256010": {
+ "content": "apc_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256011": {
+ "content": "arb_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256012": {
+ "content": "ars_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256013": {
+ "content": "ary_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256014": {
+ "content": "arz_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256015": {
+ "content": "asm_Beng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256016": {
+ "content": "ast_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256017": {
+ "content": "awa_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256018": {
+ "content": "ayr_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256019": {
+ "content": "azb_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256020": {
+ "content": "azj_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256021": {
+ "content": "bak_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256022": {
+ "content": "bam_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256023": {
+ "content": "ban_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256024": {
+ "content": "bel_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256025": {
+ "content": "bem_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256026": {
+ "content": "ben_Beng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256027": {
+ "content": "bho_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256028": {
+ "content": "bjn_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256029": {
+ "content": "bjn_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256030": {
+ "content": "bod_Tibt",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256031": {
+ "content": "bos_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256032": {
+ "content": "bug_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256033": {
+ "content": "bul_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256034": {
+ "content": "cat_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256035": {
+ "content": "ceb_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256036": {
+ "content": "ces_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256037": {
+ "content": "cjk_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256038": {
+ "content": "ckb_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256039": {
+ "content": "crh_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256040": {
+ "content": "cym_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256041": {
+ "content": "dan_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256042": {
+ "content": "deu_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256043": {
+ "content": "dik_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256044": {
+ "content": "dyu_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256045": {
+ "content": "dzo_Tibt",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256046": {
+ "content": "ell_Grek",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256047": {
+ "content": "eng_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256048": {
+ "content": "epo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256049": {
+ "content": "est_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256050": {
+ "content": "eus_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256051": {
+ "content": "ewe_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256052": {
+ "content": "fao_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256053": {
+ "content": "pes_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256054": {
+ "content": "fij_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256055": {
+ "content": "fin_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256056": {
+ "content": "fon_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256057": {
+ "content": "fra_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256058": {
+ "content": "fur_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256059": {
+ "content": "fuv_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256060": {
+ "content": "gla_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256061": {
+ "content": "gle_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256062": {
+ "content": "glg_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256063": {
+ "content": "grn_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256064": {
+ "content": "guj_Gujr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256065": {
+ "content": "hat_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256066": {
+ "content": "hau_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256067": {
+ "content": "heb_Hebr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256068": {
+ "content": "hin_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256069": {
+ "content": "hne_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256070": {
+ "content": "hrv_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256071": {
+ "content": "hun_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256072": {
+ "content": "hye_Armn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256073": {
+ "content": "ibo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256074": {
+ "content": "ilo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256075": {
+ "content": "ind_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256076": {
+ "content": "isl_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256077": {
+ "content": "ita_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256078": {
+ "content": "jav_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256079": {
+ "content": "jpn_Jpan",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256080": {
+ "content": "kab_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256081": {
+ "content": "kac_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256082": {
+ "content": "kam_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256083": {
+ "content": "kan_Knda",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256084": {
+ "content": "kas_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256085": {
+ "content": "kas_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256086": {
+ "content": "kat_Geor",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256087": {
+ "content": "knc_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256088": {
+ "content": "knc_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256089": {
+ "content": "kaz_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256090": {
+ "content": "kbp_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256091": {
+ "content": "kea_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256092": {
+ "content": "khm_Khmr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256093": {
+ "content": "kik_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256094": {
+ "content": "kin_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256095": {
+ "content": "kir_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256096": {
+ "content": "kmb_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256097": {
+ "content": "kon_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256098": {
+ "content": "kor_Hang",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256099": {
+ "content": "kmr_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256100": {
+ "content": "lao_Laoo",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256101": {
+ "content": "lvs_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256102": {
+ "content": "lij_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256103": {
+ "content": "lim_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256104": {
+ "content": "lin_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256105": {
+ "content": "lit_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256106": {
+ "content": "lmo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256107": {
+ "content": "ltg_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256108": {
+ "content": "ltz_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256109": {
+ "content": "lua_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256110": {
+ "content": "lug_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256111": {
+ "content": "luo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256112": {
+ "content": "lus_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256113": {
+ "content": "mag_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256114": {
+ "content": "mai_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256115": {
+ "content": "mal_Mlym",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256116": {
+ "content": "mar_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256117": {
+ "content": "min_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256118": {
+ "content": "mkd_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256119": {
+ "content": "plt_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256120": {
+ "content": "mlt_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256121": {
+ "content": "mni_Beng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256122": {
+ "content": "khk_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256123": {
+ "content": "mos_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256124": {
+ "content": "mri_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256125": {
+ "content": "zsm_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256126": {
+ "content": "mya_Mymr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256127": {
+ "content": "nld_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256128": {
+ "content": "nno_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256129": {
+ "content": "nob_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256130": {
+ "content": "npi_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256131": {
+ "content": "nso_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256132": {
+ "content": "nus_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256133": {
+ "content": "nya_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256134": {
+ "content": "oci_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256135": {
+ "content": "gaz_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256136": {
+ "content": "ory_Orya",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256137": {
+ "content": "pag_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256138": {
+ "content": "pan_Guru",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256139": {
+ "content": "pap_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256140": {
+ "content": "pol_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256141": {
+ "content": "por_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256142": {
+ "content": "prs_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256143": {
+ "content": "pbt_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256144": {
+ "content": "quy_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256145": {
+ "content": "ron_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256146": {
+ "content": "run_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256147": {
+ "content": "rus_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256148": {
+ "content": "sag_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256149": {
+ "content": "san_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256150": {
+ "content": "sat_Beng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256151": {
+ "content": "scn_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256152": {
+ "content": "shn_Mymr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256153": {
+ "content": "sin_Sinh",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256154": {
+ "content": "slk_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256155": {
+ "content": "slv_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256156": {
+ "content": "smo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256157": {
+ "content": "sna_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256158": {
+ "content": "snd_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256159": {
+ "content": "som_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256160": {
+ "content": "sot_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256161": {
+ "content": "spa_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256162": {
+ "content": "als_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256163": {
+ "content": "srd_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256164": {
+ "content": "srp_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256165": {
+ "content": "ssw_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256166": {
+ "content": "sun_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256167": {
+ "content": "swe_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256168": {
+ "content": "swh_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256169": {
+ "content": "szl_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256170": {
+ "content": "tam_Taml",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256171": {
+ "content": "tat_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256172": {
+ "content": "tel_Telu",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256173": {
+ "content": "tgk_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256174": {
+ "content": "tgl_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256175": {
+ "content": "tha_Thai",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256176": {
+ "content": "tir_Ethi",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256177": {
+ "content": "taq_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256178": {
+ "content": "taq_Tfng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256179": {
+ "content": "tpi_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256180": {
+ "content": "tsn_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256181": {
+ "content": "tso_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256182": {
+ "content": "tuk_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256183": {
+ "content": "tum_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256184": {
+ "content": "tur_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256185": {
+ "content": "twi_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256186": {
+ "content": "tzm_Tfng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256187": {
+ "content": "uig_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256188": {
+ "content": "ukr_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256189": {
+ "content": "umb_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256190": {
+ "content": "urd_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256191": {
+ "content": "uzn_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256192": {
+ "content": "vec_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256193": {
+ "content": "vie_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256194": {
+ "content": "war_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256195": {
+ "content": "wol_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256196": {
+ "content": "xho_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256197": {
+ "content": "ydd_Hebr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256198": {
+ "content": "yor_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256199": {
+ "content": "yue_Hant",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256200": {
+ "content": "zho_Hans",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256201": {
+ "content": "zho_Hant",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256202": {
+ "content": "zul_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256203": {
+ "content": "<mask>",
+ "lstrip": true,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "additional_special_tokens": [
+ "ace_Arab",
+ "ace_Latn",
+ "acm_Arab",
+ "acq_Arab",
+ "aeb_Arab",
+ "afr_Latn",
+ "ajp_Arab",
+ "aka_Latn",
+ "amh_Ethi",
+ "apc_Arab",
+ "arb_Arab",
+ "ars_Arab",
+ "ary_Arab",
+ "arz_Arab",
+ "asm_Beng",
+ "ast_Latn",
+ "awa_Deva",
+ "ayr_Latn",
+ "azb_Arab",
+ "azj_Latn",
+ "bak_Cyrl",
+ "bam_Latn",
+ "ban_Latn",
+ "bel_Cyrl",
+ "bem_Latn",
+ "ben_Beng",
+ "bho_Deva",
+ "bjn_Arab",
+ "bjn_Latn",
+ "bod_Tibt",
+ "bos_Latn",
+ "bug_Latn",
+ "bul_Cyrl",
+ "cat_Latn",
+ "ceb_Latn",
+ "ces_Latn",
+ "cjk_Latn",
+ "ckb_Arab",
+ "crh_Latn",
+ "cym_Latn",
+ "dan_Latn",
+ "deu_Latn",
+ "dik_Latn",
+ "dyu_Latn",
+ "dzo_Tibt",
+ "ell_Grek",
+ "eng_Latn",
+ "epo_Latn",
+ "est_Latn",
+ "eus_Latn",
+ "ewe_Latn",
+ "fao_Latn",
+ "pes_Arab",
+ "fij_Latn",
+ "fin_Latn",
+ "fon_Latn",
+ "fra_Latn",
+ "fur_Latn",
+ "fuv_Latn",
+ "gla_Latn",
+ "gle_Latn",
+ "glg_Latn",
+ "grn_Latn",
+ "guj_Gujr",
+ "hat_Latn",
+ "hau_Latn",
+ "heb_Hebr",
+ "hin_Deva",
+ "hne_Deva",
+ "hrv_Latn",
+ "hun_Latn",
+ "hye_Armn",
+ "ibo_Latn",
+ "ilo_Latn",
+ "ind_Latn",
+ "isl_Latn",
+ "ita_Latn",
+ "jav_Latn",
+ "jpn_Jpan",
+ "kab_Latn",
+ "kac_Latn",
+ "kam_Latn",
+ "kan_Knda",
+ "kas_Arab",
+ "kas_Deva",
+ "kat_Geor",
+ "knc_Arab",
+ "knc_Latn",
+ "kaz_Cyrl",
+ "kbp_Latn",
+ "kea_Latn",
+ "khm_Khmr",
+ "kik_Latn",
+ "kin_Latn",
+ "kir_Cyrl",
+ "kmb_Latn",
+ "kon_Latn",
+ "kor_Hang",
+ "kmr_Latn",
+ "lao_Laoo",
+ "lvs_Latn",
+ "lij_Latn",
+ "lim_Latn",
+ "lin_Latn",
+ "lit_Latn",
+ "lmo_Latn",
+ "ltg_Latn",
+ "ltz_Latn",
+ "lua_Latn",
+ "lug_Latn",
+ "luo_Latn",
+ "lus_Latn",
+ "mag_Deva",
+ "mai_Deva",
+ "mal_Mlym",
+ "mar_Deva",
+ "min_Latn",
+ "mkd_Cyrl",
+ "plt_Latn",
+ "mlt_Latn",
+ "mni_Beng",
+ "khk_Cyrl",
+ "mos_Latn",
+ "mri_Latn",
+ "zsm_Latn",
+ "mya_Mymr",
+ "nld_Latn",
+ "nno_Latn",
+ "nob_Latn",
+ "npi_Deva",
+ "nso_Latn",
+ "nus_Latn",
+ "nya_Latn",
+ "oci_Latn",
+ "gaz_Latn",
+ "ory_Orya",
+ "pag_Latn",
+ "pan_Guru",
+ "pap_Latn",
+ "pol_Latn",
+ "por_Latn",
+ "prs_Arab",
+ "pbt_Arab",
+ "quy_Latn",
+ "ron_Latn",
+ "run_Latn",
+ "rus_Cyrl",
+ "sag_Latn",
+ "san_Deva",
+ "sat_Beng",
+ "scn_Latn",
+ "shn_Mymr",
+ "sin_Sinh",
+ "slk_Latn",
+ "slv_Latn",
+ "smo_Latn",
+ "sna_Latn",
+ "snd_Arab",
+ "som_Latn",
+ "sot_Latn",
+ "spa_Latn",
+ "als_Latn",
+ "srd_Latn",
+ "srp_Cyrl",
+ "ssw_Latn",
+ "sun_Latn",
+ "swe_Latn",
+ "swh_Latn",
+ "szl_Latn",
+ "tam_Taml",
+ "tat_Cyrl",
+ "tel_Telu",
+ "tgk_Cyrl",
+ "tgl_Latn",
+ "tha_Thai",
+ "tir_Ethi",
+ "taq_Latn",
+ "taq_Tfng",
+ "tpi_Latn",
+ "tsn_Latn",
+ "tso_Latn",
+ "tuk_Latn",
+ "tum_Latn",
+ "tur_Latn",
+ "twi_Latn",
+ "tzm_Tfng",
+ "uig_Arab",
+ "ukr_Cyrl",
+ "umb_Latn",
+ "urd_Arab",
+ "uzn_Latn",
+ "vec_Latn",
+ "vie_Latn",
+ "war_Latn",
+ "wol_Latn",
+ "xho_Latn",
+ "ydd_Hebr",
+ "yor_Latn",
+ "yue_Hant",
+ "zho_Hans",
+ "zho_Hant",
+ "zul_Latn"
+ ],
+ "bos_token": "<s>",
+ "clean_up_tokenization_spaces": true,
+ "cls_token": "<s>",
+ "eos_token": "</s>",
+ "legacy_behaviour": false,
+ "mask_token": "<mask>",
+ "model_max_length": 1024,
+ "pad_token": "<pad>",
+ "sep_token": "</s>",
+ "sp_model_kwargs": {},
+ "src_lang": "eng_Latn",
+ "tgt_lang": null,
+ "tokenizer_class": "NllbTokenizer",
+ "unk_token": "<unk>"
+}
diff --git a/checkpoint-1000/trainer_state.json b/checkpoint-1000/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..8468a1edc1f52e4fff22291ac3490966c38a7f0d
--- /dev/null
+++ b/checkpoint-1000/trainer_state.json
@@ -0,0 +1,7033 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 1.0002813291238162,
+ "eval_steps": 500,
+ "global_step": 1000,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.001000281329123816,
+ "grad_norm": 7.503077030181885,
+ "learning_rate": 0.0001999999450590425,
+ "loss": 3.6513,
+ "step": 1
+ },
+ {
+ "epoch": 0.002000562658247632,
+ "grad_norm": 3.1191189289093018,
+ "learning_rate": 0.00019999978023623033,
+ "loss": 2.8683,
+ "step": 2
+ },
+ {
+ "epoch": 0.003000843987371448,
+ "grad_norm": 1.9282511472702026,
+ "learning_rate": 0.0001999995055317446,
+ "loss": 2.7882,
+ "step": 3
+ },
+ {
+ "epoch": 0.004001125316495264,
+ "grad_norm": 1.726026177406311,
+ "learning_rate": 0.00019999912094588717,
+ "loss": 2.7716,
+ "step": 4
+ },
+ {
+ "epoch": 0.005001406645619081,
+ "grad_norm": 1.4632996320724487,
+ "learning_rate": 0.00019999862647908064,
+ "loss": 2.6869,
+ "step": 5
+ },
+ {
+ "epoch": 0.006001687974742896,
+ "grad_norm": 1.5544543266296387,
+ "learning_rate": 0.00019999802213186834,
+ "loss": 2.6952,
+ "step": 6
+ },
+ {
+ "epoch": 0.007001969303866712,
+ "grad_norm": 1.5888980627059937,
+ "learning_rate": 0.0001999973079049143,
+ "loss": 2.1237,
+ "step": 7
+ },
+ {
+ "epoch": 0.008002250632990529,
+ "grad_norm": 1.8750641345977783,
+ "learning_rate": 0.00019999648379900338,
+ "loss": 2.3376,
+ "step": 8
+ },
+ {
+ "epoch": 0.009002531962114344,
+ "grad_norm": 1.0540648698806763,
+ "learning_rate": 0.0001999955498150411,
+ "loss": 2.4896,
+ "step": 9
+ },
+ {
+ "epoch": 0.010002813291238161,
+ "grad_norm": 1.0269274711608887,
+ "learning_rate": 0.00019999450595405374,
+ "loss": 2.1365,
+ "step": 10
+ },
+ {
+ "epoch": 0.011003094620361977,
+ "grad_norm": 1.0851730108261108,
+ "learning_rate": 0.0001999933522171883,
+ "loss": 2.235,
+ "step": 11
+ },
+ {
+ "epoch": 0.012003375949485792,
+ "grad_norm": 0.927042543888092,
+ "learning_rate": 0.00019999208860571255,
+ "loss": 2.2438,
+ "step": 12
+ },
+ {
+ "epoch": 0.01300365727860961,
+ "grad_norm": 1.3729208707809448,
+ "learning_rate": 0.00019999071512101496,
+ "loss": 2.0845,
+ "step": 13
+ },
+ {
+ "epoch": 0.014003938607733425,
+ "grad_norm": 1.1325910091400146,
+ "learning_rate": 0.00019998923176460474,
+ "loss": 2.0668,
+ "step": 14
+ },
+ {
+ "epoch": 0.01500421993685724,
+ "grad_norm": 0.9290457367897034,
+ "learning_rate": 0.00019998763853811184,
+ "loss": 2.0227,
+ "step": 15
+ },
+ {
+ "epoch": 0.016004501265981057,
+ "grad_norm": 0.942140519618988,
+ "learning_rate": 0.00019998593544328692,
+ "loss": 2.1598,
+ "step": 16
+ },
+ {
+ "epoch": 0.017004782595104875,
+ "grad_norm": 1.096635103225708,
+ "learning_rate": 0.00019998412248200138,
+ "loss": 2.1897,
+ "step": 17
+ },
+ {
+ "epoch": 0.01800506392422869,
+ "grad_norm": 1.1107186079025269,
+ "learning_rate": 0.00019998219965624734,
+ "loss": 2.0546,
+ "step": 18
+ },
+ {
+ "epoch": 0.019005345253352506,
+ "grad_norm": 0.9696593880653381,
+ "learning_rate": 0.0001999801669681376,
+ "loss": 2.0317,
+ "step": 19
+ },
+ {
+ "epoch": 0.020005626582476323,
+ "grad_norm": 0.9394300580024719,
+ "learning_rate": 0.00019997802441990573,
+ "loss": 2.2883,
+ "step": 20
+ },
+ {
+ "epoch": 0.021005907911600136,
+ "grad_norm": 1.08865225315094,
+ "learning_rate": 0.00019997577201390606,
+ "loss": 1.9838,
+ "step": 21
+ },
+ {
+ "epoch": 0.022006189240723954,
+ "grad_norm": 1.0712405443191528,
+ "learning_rate": 0.00019997340975261353,
+ "loss": 2.1177,
+ "step": 22
+ },
+ {
+ "epoch": 0.02300647056984777,
+ "grad_norm": 1.3190314769744873,
+ "learning_rate": 0.00019997093763862383,
+ "loss": 1.9755,
+ "step": 23
+ },
+ {
+ "epoch": 0.024006751898971584,
+ "grad_norm": 1.0659812688827515,
+ "learning_rate": 0.0001999683556746534,
+ "loss": 1.9829,
+ "step": 24
+ },
+ {
+ "epoch": 0.0250070332280954,
+ "grad_norm": 1.1824345588684082,
+ "learning_rate": 0.0001999656638635393,
+ "loss": 2.4219,
+ "step": 25
+ },
+ {
+ "epoch": 0.02600731455721922,
+ "grad_norm": 1.3446214199066162,
+ "learning_rate": 0.0001999628622082394,
+ "loss": 1.9644,
+ "step": 26
+ },
+ {
+ "epoch": 0.027007595886343033,
+ "grad_norm": 1.2527475357055664,
+ "learning_rate": 0.0001999599507118322,
+ "loss": 2.1889,
+ "step": 27
+ },
+ {
+ "epoch": 0.02800787721546685,
+ "grad_norm": 1.4738999605178833,
+ "learning_rate": 0.00019995692937751683,
+ "loss": 2.1949,
+ "step": 28
+ },
+ {
+ "epoch": 0.029008158544590667,
+ "grad_norm": 1.0533576011657715,
+ "learning_rate": 0.0001999537982086133,
+ "loss": 2.1034,
+ "step": 29
+ },
+ {
+ "epoch": 0.03000843987371448,
+ "grad_norm": 1.0343223810195923,
+ "learning_rate": 0.00019995055720856218,
+ "loss": 1.9561,
+ "step": 30
+ },
+ {
+ "epoch": 0.031008721202838298,
+ "grad_norm": 1.1149976253509521,
+ "learning_rate": 0.00019994720638092468,
+ "loss": 2.0981,
+ "step": 31
+ },
+ {
+ "epoch": 0.032009002531962115,
+ "grad_norm": 1.197178840637207,
+ "learning_rate": 0.00019994374572938277,
+ "loss": 2.1587,
+ "step": 32
+ },
+ {
+ "epoch": 0.03300928386108593,
+ "grad_norm": 0.9382303953170776,
+ "learning_rate": 0.00019994017525773913,
+ "loss": 1.869,
+ "step": 33
+ },
+ {
+ "epoch": 0.03400956519020975,
+ "grad_norm": 1.0526461601257324,
+ "learning_rate": 0.00019993649496991705,
+ "loss": 1.9045,
+ "step": 34
+ },
+ {
+ "epoch": 0.03500984651933356,
+ "grad_norm": 0.8510498404502869,
+ "learning_rate": 0.00019993270486996046,
+ "loss": 2.1005,
+ "step": 35
+ },
+ {
+ "epoch": 0.03601012784845738,
+ "grad_norm": 0.9990401268005371,
+ "learning_rate": 0.000199928804962034,
+ "loss": 1.8569,
+ "step": 36
+ },
+ {
+ "epoch": 0.037010409177581194,
+ "grad_norm": 0.9243854284286499,
+ "learning_rate": 0.00019992479525042303,
+ "loss": 1.9666,
+ "step": 37
+ },
+ {
+ "epoch": 0.03801069050670501,
+ "grad_norm": 0.7774227261543274,
+ "learning_rate": 0.00019992067573953342,
+ "loss": 2.0376,
+ "step": 38
+ },
+ {
+ "epoch": 0.03901097183582883,
+ "grad_norm": 0.8114833235740662,
+ "learning_rate": 0.0001999164464338918,
+ "loss": 2.1608,
+ "step": 39
+ },
+ {
+ "epoch": 0.040011253164952645,
+ "grad_norm": 0.8716320395469666,
+ "learning_rate": 0.0001999121073381454,
+ "loss": 2.0743,
+ "step": 40
+ },
+ {
+ "epoch": 0.041011534494076456,
+ "grad_norm": 0.9571239948272705,
+ "learning_rate": 0.0001999076584570621,
+ "loss": 2.0128,
+ "step": 41
+ },
+ {
+ "epoch": 0.04201181582320027,
+ "grad_norm": 1.038691520690918,
+ "learning_rate": 0.00019990309979553045,
+ "loss": 1.976,
+ "step": 42
+ },
+ {
+ "epoch": 0.04301209715232409,
+ "grad_norm": 1.0576292276382446,
+ "learning_rate": 0.00019989843135855958,
+ "loss": 1.94,
+ "step": 43
+ },
+ {
+ "epoch": 0.04401237848144791,
+ "grad_norm": 1.0991204977035522,
+ "learning_rate": 0.00019989365315127922,
+ "loss": 1.9397,
+ "step": 44
+ },
+ {
+ "epoch": 0.045012659810571724,
+ "grad_norm": 0.9268686175346375,
+ "learning_rate": 0.0001998887651789398,
+ "loss": 1.9305,
+ "step": 45
+ },
+ {
+ "epoch": 0.04601294113969554,
+ "grad_norm": 0.8459104299545288,
+ "learning_rate": 0.0001998837674469123,
+ "loss": 1.7941,
+ "step": 46
+ },
+ {
+ "epoch": 0.04701322246881936,
+ "grad_norm": 0.9260527491569519,
+ "learning_rate": 0.00019987865996068833,
+ "loss": 1.8843,
+ "step": 47
+ },
+ {
+ "epoch": 0.04801350379794317,
+ "grad_norm": 0.8370497226715088,
+ "learning_rate": 0.00019987344272588006,
+ "loss": 1.8779,
+ "step": 48
+ },
+ {
+ "epoch": 0.049013785127066986,
+ "grad_norm": 0.9228008389472961,
+ "learning_rate": 0.00019986811574822033,
+ "loss": 2.1713,
+ "step": 49
+ },
+ {
+ "epoch": 0.0500140664561908,
+ "grad_norm": 1.013746738433838,
+ "learning_rate": 0.00019986267903356254,
+ "loss": 2.1443,
+ "step": 50
+ },
+ {
+ "epoch": 0.05101434778531462,
+ "grad_norm": 1.0155737400054932,
+ "learning_rate": 0.0001998571325878806,
+ "loss": 1.9679,
+ "step": 51
+ },
+ {
+ "epoch": 0.05201462911443844,
+ "grad_norm": 0.9591345191001892,
+ "learning_rate": 0.0001998514764172691,
+ "loss": 2.0611,
+ "step": 52
+ },
+ {
+ "epoch": 0.053014910443562255,
+ "grad_norm": 0.9030050039291382,
+ "learning_rate": 0.00019984571052794313,
+ "loss": 1.9698,
+ "step": 53
+ },
+ {
+ "epoch": 0.054015191772686065,
+ "grad_norm": 0.7697799205780029,
+ "learning_rate": 0.00019983983492623833,
+ "loss": 2.0609,
+ "step": 54
+ },
+ {
+ "epoch": 0.05501547310180988,
+ "grad_norm": 0.8806005716323853,
+ "learning_rate": 0.00019983384961861096,
+ "loss": 1.9756,
+ "step": 55
+ },
+ {
+ "epoch": 0.0560157544309337,
+ "grad_norm": 0.9424449801445007,
+ "learning_rate": 0.0001998277546116378,
+ "loss": 2.0913,
+ "step": 56
+ },
+ {
+ "epoch": 0.05701603576005752,
+ "grad_norm": 1.139495849609375,
+ "learning_rate": 0.00019982154991201608,
+ "loss": 2.2524,
+ "step": 57
+ },
+ {
+ "epoch": 0.058016317089181334,
+ "grad_norm": 1.094347357749939,
+ "learning_rate": 0.00019981523552656377,
+ "loss": 1.8501,
+ "step": 58
+ },
+ {
+ "epoch": 0.05901659841830515,
+ "grad_norm": 1.1519278287887573,
+ "learning_rate": 0.00019980881146221914,
+ "loss": 1.9866,
+ "step": 59
+ },
+ {
+ "epoch": 0.06001687974742896,
+ "grad_norm": 1.2018250226974487,
+ "learning_rate": 0.00019980227772604112,
+ "loss": 1.8226,
+ "step": 60
+ },
+ {
+ "epoch": 0.06101716107655278,
+ "grad_norm": 0.9565753936767578,
+ "learning_rate": 0.0001997956343252091,
+ "loss": 1.8434,
+ "step": 61
+ },
+ {
+ "epoch": 0.062017442405676595,
+ "grad_norm": 1.0832768678665161,
+ "learning_rate": 0.00019978888126702296,
+ "loss": 2.1271,
+ "step": 62
+ },
+ {
+ "epoch": 0.06301772373480041,
+ "grad_norm": 0.8973837494850159,
+ "learning_rate": 0.00019978201855890308,
+ "loss": 1.8331,
+ "step": 63
+ },
+ {
+ "epoch": 0.06401800506392423,
+ "grad_norm": 0.8754604458808899,
+ "learning_rate": 0.00019977504620839035,
+ "loss": 2.1379,
+ "step": 64
+ },
+ {
+ "epoch": 0.06501828639304805,
+ "grad_norm": 0.8244839310646057,
+ "learning_rate": 0.00019976796422314615,
+ "loss": 1.8431,
+ "step": 65
+ },
+ {
+ "epoch": 0.06601856772217186,
+ "grad_norm": 0.8213551044464111,
+ "learning_rate": 0.00019976077261095226,
+ "loss": 1.9155,
+ "step": 66
+ },
+ {
+ "epoch": 0.06701884905129568,
+ "grad_norm": 0.9140985608100891,
+ "learning_rate": 0.00019975347137971098,
+ "loss": 2.0651,
+ "step": 67
+ },
+ {
+ "epoch": 0.0680191303804195,
+ "grad_norm": 0.8518921732902527,
+ "learning_rate": 0.00019974606053744503,
+ "loss": 1.8197,
+ "step": 68
+ },
+ {
+ "epoch": 0.06901941170954332,
+ "grad_norm": 0.8397145867347717,
+ "learning_rate": 0.00019973854009229763,
+ "loss": 1.8621,
+ "step": 69
+ },
+ {
+ "epoch": 0.07001969303866712,
+ "grad_norm": 0.8727964162826538,
+ "learning_rate": 0.00019973091005253232,
+ "loss": 1.762,
+ "step": 70
+ },
+ {
+ "epoch": 0.07101997436779094,
+ "grad_norm": 0.9284623265266418,
+ "learning_rate": 0.0001997231704265332,
+ "loss": 1.8675,
+ "step": 71
+ },
+ {
+ "epoch": 0.07202025569691475,
+ "grad_norm": 0.8280015587806702,
+ "learning_rate": 0.00019971532122280464,
+ "loss": 1.931,
+ "step": 72
+ },
+ {
+ "epoch": 0.07302053702603857,
+ "grad_norm": 0.7591394186019897,
+ "learning_rate": 0.0001997073624499716,
+ "loss": 1.8485,
+ "step": 73
+ },
+ {
+ "epoch": 0.07402081835516239,
+ "grad_norm": 0.975128710269928,
+ "learning_rate": 0.0001996992941167792,
+ "loss": 2.0784,
+ "step": 74
+ },
+ {
+ "epoch": 0.0750210996842862,
+ "grad_norm": 0.8034948110580444,
+ "learning_rate": 0.00019969111623209323,
+ "loss": 1.9849,
+ "step": 75
+ },
+ {
+ "epoch": 0.07602138101341002,
+ "grad_norm": 0.8540483713150024,
+ "learning_rate": 0.00019968282880489957,
+ "loss": 1.7832,
+ "step": 76
+ },
+ {
+ "epoch": 0.07702166234253384,
+ "grad_norm": 0.8181695342063904,
+ "learning_rate": 0.00019967443184430467,
+ "loss": 1.944,
+ "step": 77
+ },
+ {
+ "epoch": 0.07802194367165766,
+ "grad_norm": 0.8446747064590454,
+ "learning_rate": 0.0001996659253595353,
+ "loss": 1.8508,
+ "step": 78
+ },
+ {
+ "epoch": 0.07902222500078147,
+ "grad_norm": 0.8280364871025085,
+ "learning_rate": 0.0001996573093599385,
+ "loss": 1.843,
+ "step": 79
+ },
+ {
+ "epoch": 0.08002250632990529,
+ "grad_norm": 0.8016006350517273,
+ "learning_rate": 0.00019964858385498172,
+ "loss": 1.9368,
+ "step": 80
+ },
+ {
+ "epoch": 0.08102278765902911,
+ "grad_norm": 0.8450536131858826,
+ "learning_rate": 0.00019963974885425266,
+ "loss": 1.9736,
+ "step": 81
+ },
+ {
+ "epoch": 0.08202306898815291,
+ "grad_norm": 0.9172171950340271,
+ "learning_rate": 0.00019963080436745945,
+ "loss": 1.9382,
+ "step": 82
+ },
+ {
+ "epoch": 0.08302335031727673,
+ "grad_norm": 0.8581916689872742,
+ "learning_rate": 0.00019962175040443044,
+ "loss": 2.224,
+ "step": 83
+ },
+ {
+ "epoch": 0.08402363164640055,
+ "grad_norm": 0.9350367188453674,
+ "learning_rate": 0.0001996125869751143,
+ "loss": 2.0519,
+ "step": 84
+ },
+ {
+ "epoch": 0.08502391297552436,
+ "grad_norm": 0.9276247620582581,
+ "learning_rate": 0.00019960331408957997,
+ "loss": 1.9657,
+ "step": 85
+ },
+ {
+ "epoch": 0.08602419430464818,
+ "grad_norm": 0.871574342250824,
+ "learning_rate": 0.00019959393175801671,
+ "loss": 1.9399,
+ "step": 86
+ },
+ {
+ "epoch": 0.087024475633772,
+ "grad_norm": 1.0662888288497925,
+ "learning_rate": 0.00019958443999073397,
+ "loss": 1.9089,
+ "step": 87
+ },
+ {
+ "epoch": 0.08802475696289581,
+ "grad_norm": 0.8258713483810425,
+ "learning_rate": 0.00019957483879816151,
+ "loss": 1.839,
+ "step": 88
+ },
+ {
+ "epoch": 0.08902503829201963,
+ "grad_norm": 0.8154664039611816,
+ "learning_rate": 0.00019956512819084928,
+ "loss": 1.8409,
+ "step": 89
+ },
+ {
+ "epoch": 0.09002531962114345,
+ "grad_norm": 0.8584638833999634,
+ "learning_rate": 0.00019955530817946748,
+ "loss": 1.9521,
+ "step": 90
+ },
+ {
+ "epoch": 0.09102560095026727,
+ "grad_norm": 0.7917523384094238,
+ "learning_rate": 0.00019954537877480655,
+ "loss": 1.9495,
+ "step": 91
+ },
+ {
+ "epoch": 0.09202588227939108,
+ "grad_norm": 1.0129039287567139,
+ "learning_rate": 0.00019953533998777706,
+ "loss": 1.949,
+ "step": 92
+ },
+ {
+ "epoch": 0.0930261636085149,
+ "grad_norm": 0.8677986264228821,
+ "learning_rate": 0.00019952519182940993,
+ "loss": 1.7875,
+ "step": 93
+ },
+ {
+ "epoch": 0.09402644493763872,
+ "grad_norm": 0.8848614692687988,
+ "learning_rate": 0.00019951493431085603,
+ "loss": 2.0675,
+ "step": 94
+ },
+ {
+ "epoch": 0.09502672626676252,
+ "grad_norm": 0.9936463832855225,
+ "learning_rate": 0.00019950456744338658,
+ "loss": 1.6761,
+ "step": 95
+ },
+ {
+ "epoch": 0.09602700759588634,
+ "grad_norm": 1.0520148277282715,
+ "learning_rate": 0.00019949409123839288,
+ "loss": 2.081,
+ "step": 96
+ },
+ {
+ "epoch": 0.09702728892501016,
+ "grad_norm": 0.8061773180961609,
+ "learning_rate": 0.00019948350570738642,
+ "loss": 1.7281,
+ "step": 97
+ },
+ {
+ "epoch": 0.09802757025413397,
+ "grad_norm": 0.7642756104469299,
+ "learning_rate": 0.0001994728108619987,
+ "loss": 2.0032,
+ "step": 98
+ },
+ {
+ "epoch": 0.09902785158325779,
+ "grad_norm": 0.8541550040245056,
+ "learning_rate": 0.0001994620067139815,
+ "loss": 2.1136,
+ "step": 99
+ },
+ {
+ "epoch": 0.1000281329123816,
+ "grad_norm": 0.7868679761886597,
+ "learning_rate": 0.00019945109327520658,
+ "loss": 1.8695,
+ "step": 100
+ },
+ {
+ "epoch": 0.10102841424150542,
+ "grad_norm": 0.8776901364326477,
+ "learning_rate": 0.00019944007055766586,
+ "loss": 1.9786,
+ "step": 101
+ },
+ {
+ "epoch": 0.10202869557062924,
+ "grad_norm": 0.9013833999633789,
+ "learning_rate": 0.00019942893857347128,
+ "loss": 2.1466,
+ "step": 102
+ },
+ {
+ "epoch": 0.10302897689975306,
+ "grad_norm": 0.957558274269104,
+ "learning_rate": 0.00019941769733485494,
+ "loss": 2.0473,
+ "step": 103
+ },
+ {
+ "epoch": 0.10402925822887688,
+ "grad_norm": 0.8921108841896057,
+ "learning_rate": 0.00019940634685416888,
+ "loss": 1.7882,
+ "step": 104
+ },
+ {
+ "epoch": 0.10502953955800069,
+ "grad_norm": 0.896019697189331,
+ "learning_rate": 0.00019939488714388524,
+ "loss": 1.8811,
+ "step": 105
+ },
+ {
+ "epoch": 0.10602982088712451,
+ "grad_norm": 0.8792067766189575,
+ "learning_rate": 0.00019938331821659614,
+ "loss": 1.8624,
+ "step": 106
+ },
+ {
+ "epoch": 0.10703010221624833,
+ "grad_norm": 0.8739930391311646,
+ "learning_rate": 0.0001993716400850138,
+ "loss": 1.8105,
+ "step": 107
+ },
+ {
+ "epoch": 0.10803038354537213,
+ "grad_norm": 0.7678424715995789,
+ "learning_rate": 0.0001993598527619703,
+ "loss": 1.8772,
+ "step": 108
+ },
+ {
+ "epoch": 0.10903066487449595,
+ "grad_norm": 0.8718745112419128,
+ "learning_rate": 0.00019934795626041783,
+ "loss": 1.8236,
+ "step": 109
+ },
+ {
+ "epoch": 0.11003094620361976,
+ "grad_norm": 0.8467247486114502,
+ "learning_rate": 0.0001993359505934285,
+ "loss": 1.8188,
+ "step": 110
+ },
+ {
+ "epoch": 0.11103122753274358,
+ "grad_norm": 0.8685783743858337,
+ "learning_rate": 0.00019932383577419432,
+ "loss": 2.0775,
+ "step": 111
+ },
+ {
+ "epoch": 0.1120315088618674,
+ "grad_norm": 0.7799698710441589,
+ "learning_rate": 0.0001993116118160273,
+ "loss": 1.6489,
+ "step": 112
+ },
+ {
+ "epoch": 0.11303179019099122,
+ "grad_norm": 0.7900094389915466,
+ "learning_rate": 0.00019929927873235938,
+ "loss": 1.8332,
+ "step": 113
+ },
+ {
+ "epoch": 0.11403207152011503,
+ "grad_norm": 0.9433258771896362,
+ "learning_rate": 0.00019928683653674237,
+ "loss": 1.9331,
+ "step": 114
+ },
+ {
+ "epoch": 0.11503235284923885,
+ "grad_norm": 0.8861056566238403,
+ "learning_rate": 0.00019927428524284805,
+ "loss": 1.9135,
+ "step": 115
+ },
+ {
+ "epoch": 0.11603263417836267,
+ "grad_norm": 0.8566756844520569,
+ "learning_rate": 0.00019926162486446792,
+ "loss": 1.9874,
+ "step": 116
+ },
+ {
+ "epoch": 0.11703291550748648,
+ "grad_norm": 0.6897929310798645,
+ "learning_rate": 0.0001992488554155135,
+ "loss": 1.946,
+ "step": 117
+ },
+ {
+ "epoch": 0.1180331968366103,
+ "grad_norm": 0.7807729244232178,
+ "learning_rate": 0.00019923597691001615,
+ "loss": 1.8127,
+ "step": 118
+ },
+ {
+ "epoch": 0.11903347816573412,
+ "grad_norm": 0.7572523355484009,
+ "learning_rate": 0.0001992229893621269,
+ "loss": 1.7768,
+ "step": 119
+ },
+ {
+ "epoch": 0.12003375949485792,
+ "grad_norm": 0.7393172979354858,
+ "learning_rate": 0.00019920989278611687,
+ "loss": 1.894,
+ "step": 120
+ },
+ {
+ "epoch": 0.12103404082398174,
+ "grad_norm": 0.866576611995697,
+ "learning_rate": 0.0001991966871963767,
+ "loss": 1.9285,
+ "step": 121
+ },
+ {
+ "epoch": 0.12203432215310556,
+ "grad_norm": 0.7326533794403076,
+ "learning_rate": 0.000199183372607417,
+ "loss": 1.9309,
+ "step": 122
+ },
+ {
+ "epoch": 0.12303460348222937,
+ "grad_norm": 0.7655537724494934,
+ "learning_rate": 0.0001991699490338681,
+ "loss": 2.1145,
+ "step": 123
+ },
+ {
+ "epoch": 0.12403488481135319,
+ "grad_norm": 0.9714633226394653,
+ "learning_rate": 0.00019915641649048005,
+ "loss": 2.0341,
+ "step": 124
+ },
+ {
+ "epoch": 0.12503516614047702,
+ "grad_norm": 0.8542420864105225,
+ "learning_rate": 0.0001991427749921227,
+ "loss": 2.1426,
+ "step": 125
+ },
+ {
+ "epoch": 0.12603544746960083,
+ "grad_norm": 0.8286274671554565,
+ "learning_rate": 0.00019912902455378556,
+ "loss": 1.8452,
+ "step": 126
+ },
+ {
+ "epoch": 0.12703572879872463,
+ "grad_norm": 0.8823768496513367,
+ "learning_rate": 0.00019911516519057788,
+ "loss": 1.8651,
+ "step": 127
+ },
+ {
+ "epoch": 0.12803601012784846,
+ "grad_norm": 0.7301567196846008,
+ "learning_rate": 0.00019910119691772863,
+ "loss": 1.7776,
+ "step": 128
+ },
+ {
+ "epoch": 0.12903629145697226,
+ "grad_norm": 0.8402552604675293,
+ "learning_rate": 0.00019908711975058637,
+ "loss": 1.8617,
+ "step": 129
+ },
+ {
+ "epoch": 0.1300365727860961,
+ "grad_norm": 0.814500093460083,
+ "learning_rate": 0.0001990729337046194,
+ "loss": 1.9156,
+ "step": 130
+ },
+ {
+ "epoch": 0.1310368541152199,
+ "grad_norm": 0.8262699246406555,
+ "learning_rate": 0.0001990586387954156,
+ "loss": 1.8659,
+ "step": 131
+ },
+ {
+ "epoch": 0.13203713544434373,
+ "grad_norm": 0.8846324682235718,
+ "learning_rate": 0.00019904423503868247,
+ "loss": 2.043,
+ "step": 132
+ },
+ {
+ "epoch": 0.13303741677346753,
+ "grad_norm": 0.8757227659225464,
+ "learning_rate": 0.00019902972245024715,
+ "loss": 1.9217,
+ "step": 133
+ },
+ {
+ "epoch": 0.13403769810259136,
+ "grad_norm": 0.8476879596710205,
+ "learning_rate": 0.00019901510104605637,
+ "loss": 1.8892,
+ "step": 134
+ },
+ {
+ "epoch": 0.13503797943171517,
+ "grad_norm": 0.7707583904266357,
+ "learning_rate": 0.00019900037084217637,
+ "loss": 1.787,
+ "step": 135
+ },
+ {
+ "epoch": 0.136038260760839,
+ "grad_norm": 0.7389562129974365,
+ "learning_rate": 0.00019898553185479303,
+ "loss": 1.5854,
+ "step": 136
+ },
+ {
+ "epoch": 0.1370385420899628,
+ "grad_norm": 0.7331375479698181,
+ "learning_rate": 0.00019897058410021167,
+ "loss": 1.997,
+ "step": 137
+ },
+ {
+ "epoch": 0.13803882341908663,
+ "grad_norm": 0.7219388484954834,
+ "learning_rate": 0.00019895552759485722,
+ "loss": 1.8337,
+ "step": 138
+ },
+ {
+ "epoch": 0.13903910474821043,
+ "grad_norm": 0.8535702228546143,
+ "learning_rate": 0.00019894036235527395,
+ "loss": 1.7818,
+ "step": 139
+ },
+ {
+ "epoch": 0.14003938607733424,
+ "grad_norm": 0.7627841830253601,
+ "learning_rate": 0.00019892508839812584,
+ "loss": 1.8173,
+ "step": 140
+ },
+ {
+ "epoch": 0.14103966740645807,
+ "grad_norm": 0.8397619724273682,
+ "learning_rate": 0.00019890970574019617,
+ "loss": 1.8735,
+ "step": 141
+ },
+ {
+ "epoch": 0.14203994873558187,
+ "grad_norm": 0.8093482851982117,
+ "learning_rate": 0.00019889421439838763,
+ "loss": 1.9918,
+ "step": 142
+ },
+ {
+ "epoch": 0.1430402300647057,
+ "grad_norm": 0.8853684067726135,
+ "learning_rate": 0.00019887861438972246,
+ "loss": 1.5825,
+ "step": 143
+ },
+ {
+ "epoch": 0.1440405113938295,
+ "grad_norm": 0.7413788437843323,
+ "learning_rate": 0.00019886290573134228,
+ "loss": 1.9068,
+ "step": 144
+ },
+ {
+ "epoch": 0.14504079272295334,
+ "grad_norm": 0.7924477458000183,
+ "learning_rate": 0.000198847088440508,
+ "loss": 1.8582,
+ "step": 145
+ },
+ {
+ "epoch": 0.14604107405207714,
+ "grad_norm": 0.8679131865501404,
+ "learning_rate": 0.0001988311625346,
+ "loss": 1.7104,
+ "step": 146
+ },
+ {
+ "epoch": 0.14704135538120097,
+ "grad_norm": 0.7480150461196899,
+ "learning_rate": 0.00019881512803111796,
+ "loss": 1.7288,
+ "step": 147
+ },
+ {
+ "epoch": 0.14804163671032478,
+ "grad_norm": 0.8382390737533569,
+ "learning_rate": 0.00019879898494768093,
+ "loss": 1.8004,
+ "step": 148
+ },
+ {
+ "epoch": 0.1490419180394486,
+ "grad_norm": 0.7360037565231323,
+ "learning_rate": 0.00019878273330202717,
+ "loss": 1.85,
+ "step": 149
+ },
+ {
+ "epoch": 0.1500421993685724,
+ "grad_norm": 0.9644019603729248,
+ "learning_rate": 0.00019876637311201433,
+ "loss": 2.065,
+ "step": 150
+ },
+ {
+ "epoch": 0.15104248069769624,
+ "grad_norm": 0.8116248250007629,
+ "learning_rate": 0.00019874990439561934,
+ "loss": 1.702,
+ "step": 151
+ },
+ {
+ "epoch": 0.15204276202682004,
+ "grad_norm": 0.9301722645759583,
+ "learning_rate": 0.0001987333271709383,
+ "loss": 1.8089,
+ "step": 152
+ },
+ {
+ "epoch": 0.15304304335594385,
+ "grad_norm": 0.7991555333137512,
+ "learning_rate": 0.00019871664145618657,
+ "loss": 1.8227,
+ "step": 153
+ },
+ {
+ "epoch": 0.15404332468506768,
+ "grad_norm": 0.8676092028617859,
+ "learning_rate": 0.00019869984726969878,
+ "loss": 1.7253,
+ "step": 154
+ },
+ {
+ "epoch": 0.15504360601419148,
+ "grad_norm": 0.8022972941398621,
+ "learning_rate": 0.00019868294462992866,
+ "loss": 1.8766,
+ "step": 155
+ },
+ {
+ "epoch": 0.1560438873433153,
+ "grad_norm": 1.128886103630066,
+ "learning_rate": 0.00019866593355544922,
+ "loss": 2.0197,
+ "step": 156
+ },
+ {
+ "epoch": 0.15704416867243912,
+ "grad_norm": 0.7420483827590942,
+ "learning_rate": 0.00019864881406495246,
+ "loss": 1.8825,
+ "step": 157
+ },
+ {
+ "epoch": 0.15804445000156295,
+ "grad_norm": 0.7797536849975586,
+ "learning_rate": 0.00019863158617724967,
+ "loss": 1.8892,
+ "step": 158
+ },
+ {
+ "epoch": 0.15904473133068675,
+ "grad_norm": 0.6859965324401855,
+ "learning_rate": 0.00019861424991127115,
+ "loss": 1.8424,
+ "step": 159
+ },
+ {
+ "epoch": 0.16004501265981058,
+ "grad_norm": 0.8115108609199524,
+ "learning_rate": 0.00019859680528606637,
+ "loss": 1.8394,
+ "step": 160
+ },
+ {
+ "epoch": 0.16104529398893438,
+ "grad_norm": 0.9756322503089905,
+ "learning_rate": 0.00019857925232080373,
+ "loss": 1.726,
+ "step": 161
+ },
+ {
+ "epoch": 0.16204557531805822,
+ "grad_norm": 0.8894350528717041,
+ "learning_rate": 0.00019856159103477086,
+ "loss": 1.8893,
+ "step": 162
+ },
+ {
+ "epoch": 0.16304585664718202,
+ "grad_norm": 0.8075819611549377,
+ "learning_rate": 0.00019854382144737426,
+ "loss": 1.6596,
+ "step": 163
+ },
+ {
+ "epoch": 0.16404613797630582,
+ "grad_norm": 0.8861923813819885,
+ "learning_rate": 0.00019852594357813952,
+ "loss": 1.9352,
+ "step": 164
+ },
+ {
+ "epoch": 0.16504641930542965,
+ "grad_norm": 0.8511936068534851,
+ "learning_rate": 0.00019850795744671116,
+ "loss": 1.9416,
+ "step": 165
+ },
+ {
+ "epoch": 0.16604670063455346,
+ "grad_norm": 0.9425658583641052,
+ "learning_rate": 0.0001984898630728527,
+ "loss": 1.9081,
+ "step": 166
+ },
+ {
+ "epoch": 0.1670469819636773,
+ "grad_norm": 0.7502055168151855,
+ "learning_rate": 0.0001984716604764466,
+ "loss": 1.703,
+ "step": 167
+ },
+ {
+ "epoch": 0.1680472632928011,
+ "grad_norm": 0.9135978817939758,
+ "learning_rate": 0.0001984533496774942,
+ "loss": 1.7641,
+ "step": 168
+ },
+ {
+ "epoch": 0.16904754462192492,
+ "grad_norm": 0.7768126726150513,
+ "learning_rate": 0.0001984349306961158,
+ "loss": 1.7053,
+ "step": 169
+ },
+ {
+ "epoch": 0.17004782595104873,
+ "grad_norm": 0.8106538653373718,
+ "learning_rate": 0.00019841640355255043,
+ "loss": 1.8646,
+ "step": 170
+ },
+ {
+ "epoch": 0.17104810728017256,
+ "grad_norm": 0.7872330546379089,
+ "learning_rate": 0.00019839776826715614,
+ "loss": 1.7814,
+ "step": 171
+ },
+ {
+ "epoch": 0.17204838860929636,
+ "grad_norm": 0.869532585144043,
+ "learning_rate": 0.00019837902486040978,
+ "loss": 1.7812,
+ "step": 172
+ },
+ {
+ "epoch": 0.1730486699384202,
+ "grad_norm": 1.015028715133667,
+ "learning_rate": 0.0001983601733529069,
+ "loss": 1.9432,
+ "step": 173
+ },
+ {
+ "epoch": 0.174048951267544,
+ "grad_norm": 0.800183117389679,
+ "learning_rate": 0.00019834121376536187,
+ "loss": 1.758,
+ "step": 174
+ },
+ {
+ "epoch": 0.17504923259666783,
+ "grad_norm": 0.7427104711532593,
+ "learning_rate": 0.00019832214611860793,
+ "loss": 1.6476,
+ "step": 175
+ },
+ {
+ "epoch": 0.17604951392579163,
+ "grad_norm": 0.8289130926132202,
+ "learning_rate": 0.00019830297043359692,
+ "loss": 1.7702,
+ "step": 176
+ },
+ {
+ "epoch": 0.17704979525491543,
+ "grad_norm": 0.8298771977424622,
+ "learning_rate": 0.00019828368673139947,
+ "loss": 1.7515,
+ "step": 177
+ },
+ {
+ "epoch": 0.17805007658403926,
+ "grad_norm": 0.7602815628051758,
+ "learning_rate": 0.0001982642950332049,
+ "loss": 1.7545,
+ "step": 178
+ },
+ {
+ "epoch": 0.17905035791316307,
+ "grad_norm": 0.8110321164131165,
+ "learning_rate": 0.00019824479536032112,
+ "loss": 2.2604,
+ "step": 179
+ },
+ {
+ "epoch": 0.1800506392422869,
+ "grad_norm": 0.882273256778717,
+ "learning_rate": 0.0001982251877341748,
+ "loss": 1.8133,
+ "step": 180
+ },
+ {
+ "epoch": 0.1810509205714107,
+ "grad_norm": 0.9015639424324036,
+ "learning_rate": 0.00019820547217631117,
+ "loss": 1.7282,
+ "step": 181
+ },
+ {
+ "epoch": 0.18205120190053453,
+ "grad_norm": 0.9231659173965454,
+ "learning_rate": 0.00019818564870839405,
+ "loss": 1.9094,
+ "step": 182
+ },
+ {
+ "epoch": 0.18305148322965833,
+ "grad_norm": 0.8110967874526978,
+ "learning_rate": 0.00019816571735220583,
+ "loss": 1.886,
+ "step": 183
+ },
+ {
+ "epoch": 0.18405176455878217,
+ "grad_norm": 0.7670036554336548,
+ "learning_rate": 0.00019814567812964748,
+ "loss": 1.9895,
+ "step": 184
+ },
+ {
+ "epoch": 0.18505204588790597,
+ "grad_norm": 0.7955975532531738,
+ "learning_rate": 0.00019812553106273847,
+ "loss": 1.8127,
+ "step": 185
+ },
+ {
+ "epoch": 0.1860523272170298,
+ "grad_norm": 0.8790062665939331,
+ "learning_rate": 0.00019810527617361681,
+ "loss": 1.8899,
+ "step": 186
+ },
+ {
+ "epoch": 0.1870526085461536,
+ "grad_norm": 0.8818586468696594,
+ "learning_rate": 0.00019808491348453894,
+ "loss": 1.7707,
+ "step": 187
+ },
+ {
+ "epoch": 0.18805288987527743,
+ "grad_norm": 0.746442437171936,
+ "learning_rate": 0.00019806444301787978,
+ "loss": 1.7281,
+ "step": 188
+ },
+ {
+ "epoch": 0.18905317120440124,
+ "grad_norm": 0.7786905169487,
+ "learning_rate": 0.0001980438647961327,
+ "loss": 1.7317,
+ "step": 189
+ },
+ {
+ "epoch": 0.19005345253352504,
+ "grad_norm": 0.9338862299919128,
+ "learning_rate": 0.00019802317884190935,
+ "loss": 1.9548,
+ "step": 190
+ },
+ {
+ "epoch": 0.19105373386264887,
+ "grad_norm": 0.7416581511497498,
+ "learning_rate": 0.00019800238517793996,
+ "loss": 1.8601,
+ "step": 191
+ },
+ {
+ "epoch": 0.19205401519177268,
+ "grad_norm": 0.6782898902893066,
+ "learning_rate": 0.00019798148382707296,
+ "loss": 1.8477,
+ "step": 192
+ },
+ {
+ "epoch": 0.1930542965208965,
+ "grad_norm": 0.7389237880706787,
+ "learning_rate": 0.00019796047481227515,
+ "loss": 1.7749,
+ "step": 193
+ },
+ {
+ "epoch": 0.1940545778500203,
+ "grad_norm": 0.9711095094680786,
+ "learning_rate": 0.00019793935815663163,
+ "loss": 2.0899,
+ "step": 194
+ },
+ {
+ "epoch": 0.19505485917914414,
+ "grad_norm": 0.7949391007423401,
+ "learning_rate": 0.00019791813388334581,
+ "loss": 1.8778,
+ "step": 195
+ },
+ {
+ "epoch": 0.19605514050826794,
+ "grad_norm": 0.8871057033538818,
+ "learning_rate": 0.00019789680201573933,
+ "loss": 1.7511,
+ "step": 196
+ },
+ {
+ "epoch": 0.19705542183739178,
+ "grad_norm": 0.8664624094963074,
+ "learning_rate": 0.00019787536257725202,
+ "loss": 1.7232,
+ "step": 197
+ },
+ {
+ "epoch": 0.19805570316651558,
+ "grad_norm": 0.871658980846405,
+ "learning_rate": 0.00019785381559144196,
+ "loss": 1.7987,
+ "step": 198
+ },
+ {
+ "epoch": 0.1990559844956394,
+ "grad_norm": 0.7748361229896545,
+ "learning_rate": 0.00019783216108198542,
+ "loss": 1.9239,
+ "step": 199
+ },
+ {
+ "epoch": 0.2000562658247632,
+ "grad_norm": 0.9393408298492432,
+ "learning_rate": 0.00019781039907267677,
+ "loss": 2.0936,
+ "step": 200
+ },
+ {
+ "epoch": 0.20105654715388704,
+ "grad_norm": 0.8519601225852966,
+ "learning_rate": 0.00019778852958742853,
+ "loss": 1.9108,
+ "step": 201
+ },
+ {
+ "epoch": 0.20205682848301085,
+ "grad_norm": 0.8464863300323486,
+ "learning_rate": 0.00019776655265027127,
+ "loss": 1.897,
+ "step": 202
+ },
+ {
+ "epoch": 0.20305710981213465,
+ "grad_norm": 0.8933351635932922,
+ "learning_rate": 0.00019774446828535371,
+ "loss": 1.8204,
+ "step": 203
+ },
+ {
+ "epoch": 0.20405739114125848,
+ "grad_norm": 0.8305785059928894,
+ "learning_rate": 0.00019772227651694256,
+ "loss": 1.9135,
+ "step": 204
+ },
+ {
+ "epoch": 0.20505767247038229,
+ "grad_norm": 0.8117037415504456,
+ "learning_rate": 0.00019769997736942258,
+ "loss": 1.7585,
+ "step": 205
+ },
+ {
+ "epoch": 0.20605795379950612,
+ "grad_norm": 0.7570348381996155,
+ "learning_rate": 0.00019767757086729647,
+ "loss": 1.8373,
+ "step": 206
+ },
+ {
+ "epoch": 0.20705823512862992,
+ "grad_norm": 0.9291234016418457,
+ "learning_rate": 0.00019765505703518496,
+ "loss": 1.7774,
+ "step": 207
+ },
+ {
+ "epoch": 0.20805851645775375,
+ "grad_norm": 0.8211004137992859,
+ "learning_rate": 0.00019763243589782662,
+ "loss": 1.8766,
+ "step": 208
+ },
+ {
+ "epoch": 0.20905879778687755,
+ "grad_norm": 0.6625431180000305,
+ "learning_rate": 0.00019760970748007803,
+ "loss": 1.628,
+ "step": 209
+ },
+ {
+ "epoch": 0.21005907911600138,
+ "grad_norm": 0.7974782586097717,
+ "learning_rate": 0.0001975868718069136,
+ "loss": 1.6896,
+ "step": 210
+ },
+ {
+ "epoch": 0.2110593604451252,
+ "grad_norm": 0.8364912867546082,
+ "learning_rate": 0.00019756392890342563,
+ "loss": 1.7492,
+ "step": 211
+ },
+ {
+ "epoch": 0.21205964177424902,
+ "grad_norm": 0.8730652332305908,
+ "learning_rate": 0.00019754087879482422,
+ "loss": 1.8295,
+ "step": 212
+ },
+ {
+ "epoch": 0.21305992310337282,
+ "grad_norm": 0.7532863020896912,
+ "learning_rate": 0.00019751772150643722,
+ "loss": 1.8309,
+ "step": 213
+ },
+ {
+ "epoch": 0.21406020443249665,
+ "grad_norm": 0.7375178933143616,
+ "learning_rate": 0.00019749445706371038,
+ "loss": 1.7854,
+ "step": 214
+ },
+ {
+ "epoch": 0.21506048576162046,
+ "grad_norm": 0.7524377703666687,
+ "learning_rate": 0.00019747108549220702,
+ "loss": 1.7683,
+ "step": 215
+ },
+ {
+ "epoch": 0.21606076709074426,
+ "grad_norm": 0.7331809997558594,
+ "learning_rate": 0.00019744760681760832,
+ "loss": 1.7103,
+ "step": 216
+ },
+ {
+ "epoch": 0.2170610484198681,
+ "grad_norm": 0.8083691596984863,
+ "learning_rate": 0.00019742402106571314,
+ "loss": 1.674,
+ "step": 217
+ },
+ {
+ "epoch": 0.2180613297489919,
+ "grad_norm": 0.8524570465087891,
+ "learning_rate": 0.00019740032826243788,
+ "loss": 1.7227,
+ "step": 218
+ },
+ {
+ "epoch": 0.21906161107811573,
+ "grad_norm": 0.7676658630371094,
+ "learning_rate": 0.0001973765284338167,
+ "loss": 1.8561,
+ "step": 219
+ },
+ {
+ "epoch": 0.22006189240723953,
+ "grad_norm": 0.7858710289001465,
+ "learning_rate": 0.00019735262160600127,
+ "loss": 1.7796,
+ "step": 220
+ },
+ {
+ "epoch": 0.22106217373636336,
+ "grad_norm": 0.7587497234344482,
+ "learning_rate": 0.00019732860780526088,
+ "loss": 1.9271,
+ "step": 221
+ },
+ {
+ "epoch": 0.22206245506548716,
+ "grad_norm": 0.8084688186645508,
+ "learning_rate": 0.00019730448705798239,
+ "loss": 1.8176,
+ "step": 222
+ },
+ {
+ "epoch": 0.223062736394611,
+ "grad_norm": 0.6736906170845032,
+ "learning_rate": 0.00019728025939067008,
+ "loss": 1.6288,
+ "step": 223
+ },
+ {
+ "epoch": 0.2240630177237348,
+ "grad_norm": 0.7483925819396973,
+ "learning_rate": 0.00019725592482994583,
+ "loss": 1.8363,
+ "step": 224
+ },
+ {
+ "epoch": 0.22506329905285863,
+ "grad_norm": 1.7995796203613281,
+ "learning_rate": 0.00019723148340254892,
+ "loss": 1.9072,
+ "step": 225
+ },
+ {
+ "epoch": 0.22606358038198243,
+ "grad_norm": 0.8028881549835205,
+ "learning_rate": 0.00019720693513533598,
+ "loss": 1.9021,
+ "step": 226
+ },
+ {
+ "epoch": 0.22706386171110624,
+ "grad_norm": 0.9853909015655518,
+ "learning_rate": 0.00019718228005528122,
+ "loss": 2.0159,
+ "step": 227
+ },
+ {
+ "epoch": 0.22806414304023007,
+ "grad_norm": 0.7784947156906128,
+ "learning_rate": 0.00019715751818947603,
+ "loss": 1.7816,
+ "step": 228
+ },
+ {
+ "epoch": 0.22906442436935387,
+ "grad_norm": 0.7447614669799805,
+ "learning_rate": 0.0001971326495651293,
+ "loss": 1.654,
+ "step": 229
+ },
+ {
+ "epoch": 0.2300647056984777,
+ "grad_norm": 0.8673064112663269,
+ "learning_rate": 0.00019710767420956705,
+ "loss": 2.0049,
+ "step": 230
+ },
+ {
+ "epoch": 0.2310649870276015,
+ "grad_norm": 0.8207747936248779,
+ "learning_rate": 0.0001970825921502328,
+ "loss": 1.9388,
+ "step": 231
+ },
+ {
+ "epoch": 0.23206526835672533,
+ "grad_norm": 0.742266058921814,
+ "learning_rate": 0.0001970574034146871,
+ "loss": 1.7658,
+ "step": 232
+ },
+ {
+ "epoch": 0.23306554968584914,
+ "grad_norm": 0.9097973704338074,
+ "learning_rate": 0.00019703210803060782,
+ "loss": 1.8023,
+ "step": 233
+ },
+ {
+ "epoch": 0.23406583101497297,
+ "grad_norm": 0.7512438297271729,
+ "learning_rate": 0.00019700670602579008,
+ "loss": 1.8551,
+ "step": 234
+ },
+ {
+ "epoch": 0.23506611234409677,
+ "grad_norm": 0.8303943872451782,
+ "learning_rate": 0.00019698119742814606,
+ "loss": 1.7723,
+ "step": 235
+ },
+ {
+ "epoch": 0.2360663936732206,
+ "grad_norm": 0.9195139408111572,
+ "learning_rate": 0.00019695558226570507,
+ "loss": 1.6426,
+ "step": 236
+ },
+ {
+ "epoch": 0.2370666750023444,
+ "grad_norm": 0.7734714150428772,
+ "learning_rate": 0.00019692986056661356,
+ "loss": 1.7798,
+ "step": 237
+ },
+ {
+ "epoch": 0.23806695633146824,
+ "grad_norm": 0.8759648203849792,
+ "learning_rate": 0.00019690403235913504,
+ "loss": 1.6465,
+ "step": 238
+ },
+ {
+ "epoch": 0.23906723766059204,
+ "grad_norm": 0.7688003778457642,
+ "learning_rate": 0.00019687809767165,
+ "loss": 2.0092,
+ "step": 239
+ },
+ {
+ "epoch": 0.24006751898971584,
+ "grad_norm": 0.7398790121078491,
+ "learning_rate": 0.000196852056532656,
+ "loss": 1.8176,
+ "step": 240
+ },
+ {
+ "epoch": 0.24106780031883968,
+ "grad_norm": 0.8921257853507996,
+ "learning_rate": 0.00019682590897076752,
+ "loss": 1.7387,
+ "step": 241
+ },
+ {
+ "epoch": 0.24206808164796348,
+ "grad_norm": 0.7939002513885498,
+ "learning_rate": 0.00019679965501471608,
+ "loss": 1.9417,
+ "step": 242
+ },
+ {
+ "epoch": 0.2430683629770873,
+ "grad_norm": 0.7798025608062744,
+ "learning_rate": 0.0001967732946933499,
+ "loss": 1.7134,
+ "step": 243
+ },
+ {
+ "epoch": 0.2440686443062111,
+ "grad_norm": 0.8007254600524902,
+ "learning_rate": 0.00019674682803563428,
+ "loss": 1.7387,
+ "step": 244
+ },
+ {
+ "epoch": 0.24506892563533494,
+ "grad_norm": 0.6257696151733398,
+ "learning_rate": 0.00019672025507065131,
+ "loss": 1.767,
+ "step": 245
+ },
+ {
+ "epoch": 0.24606920696445875,
+ "grad_norm": 0.7942785620689392,
+ "learning_rate": 0.00019669357582759983,
+ "loss": 1.8801,
+ "step": 246
+ },
+ {
+ "epoch": 0.24706948829358258,
+ "grad_norm": 0.7933829426765442,
+ "learning_rate": 0.00019666679033579552,
+ "loss": 1.9711,
+ "step": 247
+ },
+ {
+ "epoch": 0.24806976962270638,
+ "grad_norm": 0.7489326596260071,
+ "learning_rate": 0.00019663989862467082,
+ "loss": 1.8038,
+ "step": 248
+ },
+ {
+ "epoch": 0.2490700509518302,
+ "grad_norm": 0.7279101014137268,
+ "learning_rate": 0.00019661290072377482,
+ "loss": 1.66,
+ "step": 249
+ },
+ {
+ "epoch": 0.25007033228095404,
+ "grad_norm": 0.6823874115943909,
+ "learning_rate": 0.00019658579666277334,
+ "loss": 1.8064,
+ "step": 250
+ },
+ {
+ "epoch": 0.2510706136100778,
+ "grad_norm": 0.6561273336410522,
+ "learning_rate": 0.0001965585864714488,
+ "loss": 1.6874,
+ "step": 251
+ },
+ {
+ "epoch": 0.25207089493920165,
+ "grad_norm": 0.6457573175430298,
+ "learning_rate": 0.00019653127017970034,
+ "loss": 1.4587,
+ "step": 252
+ },
+ {
+ "epoch": 0.2530711762683255,
+ "grad_norm": 0.7649476528167725,
+ "learning_rate": 0.0001965038478175436,
+ "loss": 1.9811,
+ "step": 253
+ },
+ {
+ "epoch": 0.25407145759744926,
+ "grad_norm": 0.8786829710006714,
+ "learning_rate": 0.00019647631941511082,
+ "loss": 1.8629,
+ "step": 254
+ },
+ {
+ "epoch": 0.2550717389265731,
+ "grad_norm": 0.7038159966468811,
+ "learning_rate": 0.0001964486850026507,
+ "loss": 1.6885,
+ "step": 255
+ },
+ {
+ "epoch": 0.2560720202556969,
+ "grad_norm": 0.7255909442901611,
+ "learning_rate": 0.00019642094461052852,
+ "loss": 1.7335,
+ "step": 256
+ },
+ {
+ "epoch": 0.25707230158482075,
+ "grad_norm": 0.7780727744102478,
+ "learning_rate": 0.00019639309826922585,
+ "loss": 1.899,
+ "step": 257
+ },
+ {
+ "epoch": 0.2580725829139445,
+ "grad_norm": 0.8533650040626526,
+ "learning_rate": 0.0001963651460093409,
+ "loss": 1.7711,
+ "step": 258
+ },
+ {
+ "epoch": 0.25907286424306836,
+ "grad_norm": 0.6440068483352661,
+ "learning_rate": 0.00019633708786158806,
+ "loss": 1.6685,
+ "step": 259
+ },
+ {
+ "epoch": 0.2600731455721922,
+ "grad_norm": 0.6873877048492432,
+ "learning_rate": 0.00019630892385679818,
+ "loss": 1.7502,
+ "step": 260
+ },
+ {
+ "epoch": 0.261073426901316,
+ "grad_norm": 0.7100672721862793,
+ "learning_rate": 0.00019628065402591845,
+ "loss": 1.7789,
+ "step": 261
+ },
+ {
+ "epoch": 0.2620737082304398,
+ "grad_norm": 0.8447420001029968,
+ "learning_rate": 0.00019625227840001225,
+ "loss": 1.8577,
+ "step": 262
+ },
+ {
+ "epoch": 0.2630739895595636,
+ "grad_norm": 0.767888605594635,
+ "learning_rate": 0.0001962237970102593,
+ "loss": 1.5936,
+ "step": 263
+ },
+ {
+ "epoch": 0.26407427088868746,
+ "grad_norm": 0.6955805420875549,
+ "learning_rate": 0.0001961952098879555,
+ "loss": 1.7733,
+ "step": 264
+ },
+ {
+ "epoch": 0.26507455221781123,
+ "grad_norm": 0.777740478515625,
+ "learning_rate": 0.00019616651706451287,
+ "loss": 1.6027,
+ "step": 265
+ },
+ {
+ "epoch": 0.26607483354693506,
+ "grad_norm": 0.7691099047660828,
+ "learning_rate": 0.0001961377185714597,
+ "loss": 1.7457,
+ "step": 266
+ },
+ {
+ "epoch": 0.2670751148760589,
+ "grad_norm": 0.6778420805931091,
+ "learning_rate": 0.0001961088144404403,
+ "loss": 1.7704,
+ "step": 267
+ },
+ {
+ "epoch": 0.2680753962051827,
+ "grad_norm": 0.7943267226219177,
+ "learning_rate": 0.00019607980470321505,
+ "loss": 1.9775,
+ "step": 268
+ },
+ {
+ "epoch": 0.2690756775343065,
+ "grad_norm": 0.6660135388374329,
+ "learning_rate": 0.00019605068939166045,
+ "loss": 1.6556,
+ "step": 269
+ },
+ {
+ "epoch": 0.27007595886343033,
+ "grad_norm": 0.8664935827255249,
+ "learning_rate": 0.00019602146853776894,
+ "loss": 2.03,
+ "step": 270
+ },
+ {
+ "epoch": 0.27107624019255416,
+ "grad_norm": 0.7783074975013733,
+ "learning_rate": 0.000195992142173649,
+ "loss": 1.7426,
+ "step": 271
+ },
+ {
+ "epoch": 0.272076521521678,
+ "grad_norm": 0.7470223903656006,
+ "learning_rate": 0.0001959627103315249,
+ "loss": 1.7284,
+ "step": 272
+ },
+ {
+ "epoch": 0.27307680285080177,
+ "grad_norm": 0.7284931540489197,
+ "learning_rate": 0.00019593317304373705,
+ "loss": 1.6977,
+ "step": 273
+ },
+ {
+ "epoch": 0.2740770841799256,
+ "grad_norm": 0.7201762795448303,
+ "learning_rate": 0.00019590353034274144,
+ "loss": 1.7184,
+ "step": 274
+ },
+ {
+ "epoch": 0.27507736550904943,
+ "grad_norm": 0.6756151914596558,
+ "learning_rate": 0.00019587378226111014,
+ "loss": 1.7276,
+ "step": 275
+ },
+ {
+ "epoch": 0.27607764683817326,
+ "grad_norm": 0.6784201860427856,
+ "learning_rate": 0.00019584392883153088,
+ "loss": 1.642,
+ "step": 276
+ },
+ {
+ "epoch": 0.27707792816729704,
+ "grad_norm": 0.7387176752090454,
+ "learning_rate": 0.00019581397008680717,
+ "loss": 1.7911,
+ "step": 277
+ },
+ {
+ "epoch": 0.27807820949642087,
+ "grad_norm": 0.9367021918296814,
+ "learning_rate": 0.00019578390605985826,
+ "loss": 2.0034,
+ "step": 278
+ },
+ {
+ "epoch": 0.2790784908255447,
+ "grad_norm": 0.803698718547821,
+ "learning_rate": 0.00019575373678371909,
+ "loss": 1.7907,
+ "step": 279
+ },
+ {
+ "epoch": 0.2800787721546685,
+ "grad_norm": 0.7324479818344116,
+ "learning_rate": 0.00019572346229154025,
+ "loss": 1.5539,
+ "step": 280
+ },
+ {
+ "epoch": 0.2810790534837923,
+ "grad_norm": 0.7107382416725159,
+ "learning_rate": 0.00019569308261658787,
+ "loss": 1.838,
+ "step": 281
+ },
+ {
+ "epoch": 0.28207933481291614,
+ "grad_norm": 0.8698626756668091,
+ "learning_rate": 0.00019566259779224378,
+ "loss": 1.7433,
+ "step": 282
+ },
+ {
+ "epoch": 0.28307961614203997,
+ "grad_norm": 0.7804028391838074,
+ "learning_rate": 0.00019563200785200526,
+ "loss": 1.7161,
+ "step": 283
+ },
+ {
+ "epoch": 0.28407989747116374,
+ "grad_norm": 0.8762909173965454,
+ "learning_rate": 0.00019560131282948516,
+ "loss": 1.8031,
+ "step": 284
+ },
+ {
+ "epoch": 0.2850801788002876,
+ "grad_norm": 0.8252436518669128,
+ "learning_rate": 0.0001955705127584117,
+ "loss": 1.6434,
+ "step": 285
+ },
+ {
+ "epoch": 0.2860804601294114,
+ "grad_norm": 0.8220797181129456,
+ "learning_rate": 0.00019553960767262863,
+ "loss": 1.8522,
+ "step": 286
+ },
+ {
+ "epoch": 0.28708074145853524,
+ "grad_norm": 0.7883003950119019,
+ "learning_rate": 0.00019550859760609503,
+ "loss": 1.8245,
+ "step": 287
+ },
+ {
+ "epoch": 0.288081022787659,
+ "grad_norm": 0.9208703637123108,
+ "learning_rate": 0.00019547748259288536,
+ "loss": 1.8877,
+ "step": 288
+ },
+ {
+ "epoch": 0.28908130411678284,
+ "grad_norm": 0.8452202677726746,
+ "learning_rate": 0.0001954462626671894,
+ "loss": 1.554,
+ "step": 289
+ },
+ {
+ "epoch": 0.2900815854459067,
+ "grad_norm": 0.82865971326828,
+ "learning_rate": 0.0001954149378633122,
+ "loss": 1.655,
+ "step": 290
+ },
+ {
+ "epoch": 0.29108186677503045,
+ "grad_norm": 0.7871205806732178,
+ "learning_rate": 0.00019538350821567404,
+ "loss": 1.621,
+ "step": 291
+ },
+ {
+ "epoch": 0.2920821481041543,
+ "grad_norm": 0.8288848996162415,
+ "learning_rate": 0.00019535197375881045,
+ "loss": 1.9277,
+ "step": 292
+ },
+ {
+ "epoch": 0.2930824294332781,
+ "grad_norm": 0.7275516986846924,
+ "learning_rate": 0.00019532033452737205,
+ "loss": 1.7949,
+ "step": 293
+ },
+ {
+ "epoch": 0.29408271076240194,
+ "grad_norm": 0.7424570322036743,
+ "learning_rate": 0.00019528859055612468,
+ "loss": 1.6407,
+ "step": 294
+ },
+ {
+ "epoch": 0.2950829920915257,
+ "grad_norm": 0.7031363248825073,
+ "learning_rate": 0.0001952567418799492,
+ "loss": 1.8793,
+ "step": 295
+ },
+ {
+ "epoch": 0.29608327342064955,
+ "grad_norm": 0.7190185189247131,
+ "learning_rate": 0.00019522478853384155,
+ "loss": 1.6759,
+ "step": 296
+ },
+ {
+ "epoch": 0.2970835547497734,
+ "grad_norm": 0.7270736694335938,
+ "learning_rate": 0.00019519273055291266,
+ "loss": 1.6351,
+ "step": 297
+ },
+ {
+ "epoch": 0.2980838360788972,
+ "grad_norm": 0.8894152641296387,
+ "learning_rate": 0.00019516056797238846,
+ "loss": 1.7908,
+ "step": 298
+ },
+ {
+ "epoch": 0.299084117408021,
+ "grad_norm": 0.9089106321334839,
+ "learning_rate": 0.00019512830082760987,
+ "loss": 1.6018,
+ "step": 299
+ },
+ {
+ "epoch": 0.3000843987371448,
+ "grad_norm": 0.8772429823875427,
+ "learning_rate": 0.00019509592915403255,
+ "loss": 1.8474,
+ "step": 300
+ },
+ {
+ "epoch": 0.30108468006626865,
+ "grad_norm": 0.8244933485984802,
+ "learning_rate": 0.00019506345298722717,
+ "loss": 1.4324,
+ "step": 301
+ },
+ {
+ "epoch": 0.3020849613953925,
+ "grad_norm": 0.7283012866973877,
+ "learning_rate": 0.00019503087236287913,
+ "loss": 1.5115,
+ "step": 302
+ },
+ {
+ "epoch": 0.30308524272451626,
+ "grad_norm": 0.7721333503723145,
+ "learning_rate": 0.00019499818731678873,
+ "loss": 1.6728,
+ "step": 303
+ },
+ {
+ "epoch": 0.3040855240536401,
+ "grad_norm": 0.7579306960105896,
+ "learning_rate": 0.00019496539788487082,
+ "loss": 1.5927,
+ "step": 304
+ },
+ {
+ "epoch": 0.3050858053827639,
+ "grad_norm": 0.9054704308509827,
+ "learning_rate": 0.0001949325041031551,
+ "loss": 1.9027,
+ "step": 305
+ },
+ {
+ "epoch": 0.3060860867118877,
+ "grad_norm": 0.7023262977600098,
+ "learning_rate": 0.0001948995060077859,
+ "loss": 1.7705,
+ "step": 306
+ },
+ {
+ "epoch": 0.3070863680410115,
+ "grad_norm": 0.7942065000534058,
+ "learning_rate": 0.0001948664036350221,
+ "loss": 1.8269,
+ "step": 307
+ },
+ {
+ "epoch": 0.30808664937013536,
+ "grad_norm": 0.9305068850517273,
+ "learning_rate": 0.00019483319702123732,
+ "loss": 1.8247,
+ "step": 308
+ },
+ {
+ "epoch": 0.3090869306992592,
+ "grad_norm": 0.814664900302887,
+ "learning_rate": 0.00019479988620291956,
+ "loss": 1.9179,
+ "step": 309
+ },
+ {
+ "epoch": 0.31008721202838296,
+ "grad_norm": 0.6418014764785767,
+ "learning_rate": 0.00019476647121667137,
+ "loss": 1.5011,
+ "step": 310
+ },
+ {
+ "epoch": 0.3110874933575068,
+ "grad_norm": 0.7911447882652283,
+ "learning_rate": 0.00019473295209920983,
+ "loss": 1.857,
+ "step": 311
+ },
+ {
+ "epoch": 0.3120877746866306,
+ "grad_norm": 0.7792949676513672,
+ "learning_rate": 0.00019469932888736632,
+ "loss": 1.7279,
+ "step": 312
+ },
+ {
+ "epoch": 0.31308805601575446,
+ "grad_norm": 0.7579171657562256,
+ "learning_rate": 0.00019466560161808674,
+ "loss": 1.6902,
+ "step": 313
+ },
+ {
+ "epoch": 0.31408833734487823,
+ "grad_norm": 0.7052372694015503,
+ "learning_rate": 0.00019463177032843124,
+ "loss": 1.7302,
+ "step": 314
+ },
+ {
+ "epoch": 0.31508861867400206,
+ "grad_norm": 0.7188624143600464,
+ "learning_rate": 0.00019459783505557424,
+ "loss": 1.7338,
+ "step": 315
+ },
+ {
+ "epoch": 0.3160889000031259,
+ "grad_norm": 0.6057978272438049,
+ "learning_rate": 0.00019456379583680452,
+ "loss": 1.6123,
+ "step": 316
+ },
+ {
+ "epoch": 0.31708918133224967,
+ "grad_norm": 0.8339365720748901,
+ "learning_rate": 0.000194529652709525,
+ "loss": 1.9765,
+ "step": 317
+ },
+ {
+ "epoch": 0.3180894626613735,
+ "grad_norm": 0.8524260520935059,
+ "learning_rate": 0.00019449540571125286,
+ "loss": 1.6803,
+ "step": 318
+ },
+ {
+ "epoch": 0.31908974399049733,
+ "grad_norm": 0.7035975456237793,
+ "learning_rate": 0.00019446105487961926,
+ "loss": 1.5792,
+ "step": 319
+ },
+ {
+ "epoch": 0.32009002531962116,
+ "grad_norm": 0.7894249558448792,
+ "learning_rate": 0.0001944266002523696,
+ "loss": 1.6326,
+ "step": 320
+ },
+ {
+ "epoch": 0.32109030664874494,
+ "grad_norm": 0.7716989517211914,
+ "learning_rate": 0.0001943920418673633,
+ "loss": 1.6871,
+ "step": 321
+ },
+ {
+ "epoch": 0.32209058797786877,
+ "grad_norm": 0.7914933562278748,
+ "learning_rate": 0.00019435737976257377,
+ "loss": 1.7148,
+ "step": 322
+ },
+ {
+ "epoch": 0.3230908693069926,
+ "grad_norm": 0.7113205790519714,
+ "learning_rate": 0.00019432261397608834,
+ "loss": 1.5236,
+ "step": 323
+ },
+ {
+ "epoch": 0.32409115063611643,
+ "grad_norm": 0.8609917163848877,
+ "learning_rate": 0.00019428774454610843,
+ "loss": 1.8101,
+ "step": 324
+ },
+ {
+ "epoch": 0.3250914319652402,
+ "grad_norm": 0.7319685220718384,
+ "learning_rate": 0.00019425277151094913,
+ "loss": 1.7712,
+ "step": 325
+ },
+ {
+ "epoch": 0.32609171329436404,
+ "grad_norm": 0.6478747725486755,
+ "learning_rate": 0.00019421769490903957,
+ "loss": 1.8535,
+ "step": 326
+ },
+ {
+ "epoch": 0.32709199462348787,
+ "grad_norm": 0.7025763392448425,
+ "learning_rate": 0.0001941825147789225,
+ "loss": 1.9213,
+ "step": 327
+ },
+ {
+ "epoch": 0.32809227595261165,
+ "grad_norm": 0.7595239877700806,
+ "learning_rate": 0.00019414723115925456,
+ "loss": 1.7449,
+ "step": 328
+ },
+ {
+ "epoch": 0.3290925572817355,
+ "grad_norm": 0.7728105783462524,
+ "learning_rate": 0.0001941118440888061,
+ "loss": 1.8821,
+ "step": 329
+ },
+ {
+ "epoch": 0.3300928386108593,
+ "grad_norm": 0.7430977821350098,
+ "learning_rate": 0.0001940763536064611,
+ "loss": 1.6904,
+ "step": 330
+ },
+ {
+ "epoch": 0.33109311993998314,
+ "grad_norm": 0.7909367680549622,
+ "learning_rate": 0.00019404075975121716,
+ "loss": 1.7899,
+ "step": 331
+ },
+ {
+ "epoch": 0.3320934012691069,
+ "grad_norm": 0.7561226487159729,
+ "learning_rate": 0.0001940050625621855,
+ "loss": 1.7746,
+ "step": 332
+ },
+ {
+ "epoch": 0.33309368259823074,
+ "grad_norm": 0.7602452635765076,
+ "learning_rate": 0.00019396926207859084,
+ "loss": 1.7909,
+ "step": 333
+ },
+ {
+ "epoch": 0.3340939639273546,
+ "grad_norm": 0.8194379806518555,
+ "learning_rate": 0.0001939333583397715,
+ "loss": 1.7039,
+ "step": 334
+ },
+ {
+ "epoch": 0.3350942452564784,
+ "grad_norm": 0.7036342024803162,
+ "learning_rate": 0.00019389735138517915,
+ "loss": 1.6663,
+ "step": 335
+ },
+ {
+ "epoch": 0.3360945265856022,
+ "grad_norm": 0.8429521918296814,
+ "learning_rate": 0.00019386124125437895,
+ "loss": 1.589,
+ "step": 336
+ },
+ {
+ "epoch": 0.337094807914726,
+ "grad_norm": 0.7271071076393127,
+ "learning_rate": 0.00019382502798704935,
+ "loss": 1.646,
+ "step": 337
+ },
+ {
+ "epoch": 0.33809508924384984,
+ "grad_norm": 0.7862086892127991,
+ "learning_rate": 0.00019378871162298227,
+ "loss": 1.6085,
+ "step": 338
+ },
+ {
+ "epoch": 0.3390953705729737,
+ "grad_norm": 0.676815390586853,
+ "learning_rate": 0.00019375229220208276,
+ "loss": 1.7335,
+ "step": 339
+ },
+ {
+ "epoch": 0.34009565190209745,
+ "grad_norm": 0.8916042447090149,
+ "learning_rate": 0.00019371576976436917,
+ "loss": 1.7914,
+ "step": 340
+ },
+ {
+ "epoch": 0.3410959332312213,
+ "grad_norm": 0.7913751006126404,
+ "learning_rate": 0.00019367914434997312,
+ "loss": 1.6031,
+ "step": 341
+ },
+ {
+ "epoch": 0.3420962145603451,
+ "grad_norm": 0.7409866452217102,
+ "learning_rate": 0.00019364241599913924,
+ "loss": 1.6525,
+ "step": 342
+ },
+ {
+ "epoch": 0.3430964958894689,
+ "grad_norm": 0.7472705841064453,
+ "learning_rate": 0.0001936055847522254,
+ "loss": 1.6716,
+ "step": 343
+ },
+ {
+ "epoch": 0.3440967772185927,
+ "grad_norm": 0.7030773758888245,
+ "learning_rate": 0.00019356865064970244,
+ "loss": 1.7134,
+ "step": 344
+ },
+ {
+ "epoch": 0.34509705854771655,
+ "grad_norm": 0.6609564423561096,
+ "learning_rate": 0.0001935316137321543,
+ "loss": 1.7127,
+ "step": 345
+ },
+ {
+ "epoch": 0.3460973398768404,
+ "grad_norm": 0.7811393141746521,
+ "learning_rate": 0.00019349447404027782,
+ "loss": 1.75,
+ "step": 346
+ },
+ {
+ "epoch": 0.34709762120596416,
+ "grad_norm": 0.6980521082878113,
+ "learning_rate": 0.00019345723161488283,
+ "loss": 1.82,
+ "step": 347
+ },
+ {
+ "epoch": 0.348097902535088,
+ "grad_norm": 0.749796986579895,
+ "learning_rate": 0.000193419886496892,
+ "loss": 1.9755,
+ "step": 348
+ },
+ {
+ "epoch": 0.3490981838642118,
+ "grad_norm": 0.9486667513847351,
+ "learning_rate": 0.00019338243872734086,
+ "loss": 1.7047,
+ "step": 349
+ },
+ {
+ "epoch": 0.35009846519333565,
+ "grad_norm": 0.8086081147193909,
+ "learning_rate": 0.00019334488834737775,
+ "loss": 1.661,
+ "step": 350
+ },
+ {
+ "epoch": 0.3510987465224594,
+ "grad_norm": 0.700549840927124,
+ "learning_rate": 0.00019330723539826375,
+ "loss": 1.8696,
+ "step": 351
+ },
+ {
+ "epoch": 0.35209902785158326,
+ "grad_norm": 0.7465476393699646,
+ "learning_rate": 0.00019326947992137262,
+ "loss": 1.5444,
+ "step": 352
+ },
+ {
+ "epoch": 0.3530993091807071,
+ "grad_norm": 0.7370999455451965,
+ "learning_rate": 0.00019323162195819082,
+ "loss": 1.8805,
+ "step": 353
+ },
+ {
+ "epoch": 0.35409959050983086,
+ "grad_norm": 0.719359278678894,
+ "learning_rate": 0.0001931936615503174,
+ "loss": 1.8022,
+ "step": 354
+ },
+ {
+ "epoch": 0.3550998718389547,
+ "grad_norm": 0.7301434278488159,
+ "learning_rate": 0.000193155598739464,
+ "loss": 1.6984,
+ "step": 355
+ },
+ {
+ "epoch": 0.3561001531680785,
+ "grad_norm": 0.7191399335861206,
+ "learning_rate": 0.0001931174335674547,
+ "loss": 1.7229,
+ "step": 356
+ },
+ {
+ "epoch": 0.35710043449720236,
+ "grad_norm": 0.7471932768821716,
+ "learning_rate": 0.0001930791660762262,
+ "loss": 1.7408,
+ "step": 357
+ },
+ {
+ "epoch": 0.35810071582632613,
+ "grad_norm": 0.8197934031486511,
+ "learning_rate": 0.00019304079630782752,
+ "loss": 1.6938,
+ "step": 358
+ },
+ {
+ "epoch": 0.35910099715544996,
+ "grad_norm": 0.7408166527748108,
+ "learning_rate": 0.0001930023243044201,
+ "loss": 1.7798,
+ "step": 359
+ },
+ {
+ "epoch": 0.3601012784845738,
+ "grad_norm": 0.7525373101234436,
+ "learning_rate": 0.00019296375010827773,
+ "loss": 1.711,
+ "step": 360
+ },
+ {
+ "epoch": 0.3611015598136976,
+ "grad_norm": 0.6712046265602112,
+ "learning_rate": 0.00019292507376178643,
+ "loss": 1.8157,
+ "step": 361
+ },
+ {
+ "epoch": 0.3621018411428214,
+ "grad_norm": 0.6712916493415833,
+ "learning_rate": 0.00019288629530744454,
+ "loss": 1.8707,
+ "step": 362
+ },
+ {
+ "epoch": 0.36310212247194523,
+ "grad_norm": 0.6127772331237793,
+ "learning_rate": 0.0001928474147878626,
+ "loss": 1.4743,
+ "step": 363
+ },
+ {
+ "epoch": 0.36410240380106906,
+ "grad_norm": 0.910310685634613,
+ "learning_rate": 0.0001928084322457632,
+ "loss": 1.7956,
+ "step": 364
+ },
+ {
+ "epoch": 0.3651026851301929,
+ "grad_norm": 0.6267688870429993,
+ "learning_rate": 0.00019276934772398114,
+ "loss": 1.4664,
+ "step": 365
+ },
+ {
+ "epoch": 0.36610296645931667,
+ "grad_norm": 0.8317943811416626,
+ "learning_rate": 0.00019273016126546323,
+ "loss": 1.853,
+ "step": 366
+ },
+ {
+ "epoch": 0.3671032477884405,
+ "grad_norm": 0.7581344842910767,
+ "learning_rate": 0.00019269087291326833,
+ "loss": 1.9236,
+ "step": 367
+ },
+ {
+ "epoch": 0.36810352911756433,
+ "grad_norm": 0.9311390519142151,
+ "learning_rate": 0.00019265148271056722,
+ "loss": 1.7019,
+ "step": 368
+ },
+ {
+ "epoch": 0.3691038104466881,
+ "grad_norm": 0.9513958096504211,
+ "learning_rate": 0.0001926119907006426,
+ "loss": 1.7617,
+ "step": 369
+ },
+ {
+ "epoch": 0.37010409177581194,
+ "grad_norm": 0.7407613396644592,
+ "learning_rate": 0.00019257239692688907,
+ "loss": 2.1057,
+ "step": 370
+ },
+ {
+ "epoch": 0.37110437310493577,
+ "grad_norm": 0.7530227899551392,
+ "learning_rate": 0.00019253270143281296,
+ "loss": 1.9844,
+ "step": 371
+ },
+ {
+ "epoch": 0.3721046544340596,
+ "grad_norm": 0.6733037233352661,
+ "learning_rate": 0.00019249290426203252,
+ "loss": 1.8939,
+ "step": 372
+ },
+ {
+ "epoch": 0.3731049357631834,
+ "grad_norm": 0.7037007808685303,
+ "learning_rate": 0.0001924530054582776,
+ "loss": 1.6897,
+ "step": 373
+ },
+ {
+ "epoch": 0.3741052170923072,
+ "grad_norm": 0.7689145803451538,
+ "learning_rate": 0.0001924130050653898,
+ "loss": 1.6544,
+ "step": 374
+ },
+ {
+ "epoch": 0.37510549842143104,
+ "grad_norm": 0.7100968360900879,
+ "learning_rate": 0.00019237290312732226,
+ "loss": 1.7932,
+ "step": 375
+ },
+ {
+ "epoch": 0.37610577975055487,
+ "grad_norm": 0.7645193338394165,
+ "learning_rate": 0.00019233269968813984,
+ "loss": 1.6576,
+ "step": 376
+ },
+ {
+ "epoch": 0.37710606107967864,
+ "grad_norm": 0.6831678152084351,
+ "learning_rate": 0.00019229239479201876,
+ "loss": 1.6675,
+ "step": 377
+ },
+ {
+ "epoch": 0.3781063424088025,
+ "grad_norm": 0.8677794337272644,
+ "learning_rate": 0.0001922519884832469,
+ "loss": 1.6064,
+ "step": 378
+ },
+ {
+ "epoch": 0.3791066237379263,
+ "grad_norm": 0.6727691888809204,
+ "learning_rate": 0.0001922114808062234,
+ "loss": 1.6249,
+ "step": 379
+ },
+ {
+ "epoch": 0.3801069050670501,
+ "grad_norm": 0.6588670611381531,
+ "learning_rate": 0.00019217087180545893,
+ "loss": 1.5541,
+ "step": 380
+ },
+ {
+ "epoch": 0.3811071863961739,
+ "grad_norm": 0.7666369080543518,
+ "learning_rate": 0.0001921301615255754,
+ "loss": 1.7806,
+ "step": 381
+ },
+ {
+ "epoch": 0.38210746772529774,
+ "grad_norm": 0.6465156078338623,
+ "learning_rate": 0.0001920893500113061,
+ "loss": 1.512,
+ "step": 382
+ },
+ {
+ "epoch": 0.3831077490544216,
+ "grad_norm": 0.7854346632957458,
+ "learning_rate": 0.00019204843730749547,
+ "loss": 1.6857,
+ "step": 383
+ },
+ {
+ "epoch": 0.38410803038354535,
+ "grad_norm": 0.6625111103057861,
+ "learning_rate": 0.00019200742345909915,
+ "loss": 1.7033,
+ "step": 384
+ },
+ {
+ "epoch": 0.3851083117126692,
+ "grad_norm": 0.7273709177970886,
+ "learning_rate": 0.00019196630851118398,
+ "loss": 1.665,
+ "step": 385
+ },
+ {
+ "epoch": 0.386108593041793,
+ "grad_norm": 0.6861465573310852,
+ "learning_rate": 0.0001919250925089278,
+ "loss": 1.5028,
+ "step": 386
+ },
+ {
+ "epoch": 0.38710887437091684,
+ "grad_norm": 0.845456063747406,
+ "learning_rate": 0.00019188377549761963,
+ "loss": 1.967,
+ "step": 387
+ },
+ {
+ "epoch": 0.3881091557000406,
+ "grad_norm": 0.6481165289878845,
+ "learning_rate": 0.00019184235752265928,
+ "loss": 1.6053,
+ "step": 388
+ },
+ {
+ "epoch": 0.38910943702916445,
+ "grad_norm": 0.6312947273254395,
+ "learning_rate": 0.00019180083862955772,
+ "loss": 1.4427,
+ "step": 389
+ },
+ {
+ "epoch": 0.3901097183582883,
+ "grad_norm": 0.7874154448509216,
+ "learning_rate": 0.00019175921886393666,
+ "loss": 1.6099,
+ "step": 390
+ },
+ {
+ "epoch": 0.39110999968741206,
+ "grad_norm": 0.6839481592178345,
+ "learning_rate": 0.00019171749827152869,
+ "loss": 1.7004,
+ "step": 391
+ },
+ {
+ "epoch": 0.3921102810165359,
+ "grad_norm": 0.7239277362823486,
+ "learning_rate": 0.0001916756768981772,
+ "loss": 1.8813,
+ "step": 392
+ },
+ {
+ "epoch": 0.3931105623456597,
+ "grad_norm": 0.8241100311279297,
+ "learning_rate": 0.00019163375478983632,
+ "loss": 1.9443,
+ "step": 393
+ },
+ {
+ "epoch": 0.39411084367478355,
+ "grad_norm": 0.7401999235153198,
+ "learning_rate": 0.00019159173199257085,
+ "loss": 1.6663,
+ "step": 394
+ },
+ {
+ "epoch": 0.3951111250039073,
+ "grad_norm": 0.8297036290168762,
+ "learning_rate": 0.00019154960855255628,
+ "loss": 1.8012,
+ "step": 395
+ },
+ {
+ "epoch": 0.39611140633303116,
+ "grad_norm": 0.9661216735839844,
+ "learning_rate": 0.0001915073845160786,
+ "loss": 1.7007,
+ "step": 396
+ },
+ {
+ "epoch": 0.397111687662155,
+ "grad_norm": 1.4041926860809326,
+ "learning_rate": 0.00019146505992953446,
+ "loss": 1.7232,
+ "step": 397
+ },
+ {
+ "epoch": 0.3981119689912788,
+ "grad_norm": 0.8469036221504211,
+ "learning_rate": 0.00019142263483943085,
+ "loss": 1.4479,
+ "step": 398
+ },
+ {
+ "epoch": 0.3991122503204026,
+ "grad_norm": 0.9476561546325684,
+ "learning_rate": 0.00019138010929238534,
+ "loss": 1.8572,
+ "step": 399
+ },
+ {
+ "epoch": 0.4001125316495264,
+ "grad_norm": 0.7196705937385559,
+ "learning_rate": 0.00019133748333512575,
+ "loss": 1.6184,
+ "step": 400
+ },
+ {
+ "epoch": 0.40111281297865026,
+ "grad_norm": 0.8957480192184448,
+ "learning_rate": 0.00019129475701449035,
+ "loss": 1.762,
+ "step": 401
+ },
+ {
+ "epoch": 0.4021130943077741,
+ "grad_norm": 0.7850635647773743,
+ "learning_rate": 0.0001912519303774276,
+ "loss": 1.6764,
+ "step": 402
+ },
+ {
+ "epoch": 0.40311337563689786,
+ "grad_norm": 0.7579814791679382,
+ "learning_rate": 0.0001912090034709963,
+ "loss": 1.6231,
+ "step": 403
+ },
+ {
+ "epoch": 0.4041136569660217,
+ "grad_norm": 0.7173107266426086,
+ "learning_rate": 0.00019116597634236525,
+ "loss": 1.7107,
+ "step": 404
+ },
+ {
+ "epoch": 0.4051139382951455,
+ "grad_norm": 0.7832950353622437,
+ "learning_rate": 0.0001911228490388136,
+ "loss": 1.8608,
+ "step": 405
+ },
+ {
+ "epoch": 0.4061142196242693,
+ "grad_norm": 0.716299295425415,
+ "learning_rate": 0.00019107962160773035,
+ "loss": 1.652,
+ "step": 406
+ },
+ {
+ "epoch": 0.40711450095339313,
+ "grad_norm": 0.6675253510475159,
+ "learning_rate": 0.0001910362940966147,
+ "loss": 1.5963,
+ "step": 407
+ },
+ {
+ "epoch": 0.40811478228251696,
+ "grad_norm": 0.6555336713790894,
+ "learning_rate": 0.00019099286655307568,
+ "loss": 1.4991,
+ "step": 408
+ },
+ {
+ "epoch": 0.4091150636116408,
+ "grad_norm": 0.7307867407798767,
+ "learning_rate": 0.0001909493390248324,
+ "loss": 1.8221,
+ "step": 409
+ },
+ {
+ "epoch": 0.41011534494076457,
+ "grad_norm": 0.6557430624961853,
+ "learning_rate": 0.00019090571155971366,
+ "loss": 1.6484,
+ "step": 410
+ },
+ {
+ "epoch": 0.4111156262698884,
+ "grad_norm": 0.6816605925559998,
+ "learning_rate": 0.00019086198420565823,
+ "loss": 1.5052,
+ "step": 411
+ },
+ {
+ "epoch": 0.41211590759901223,
+ "grad_norm": 0.66513592004776,
+ "learning_rate": 0.00019081815701071445,
+ "loss": 1.8818,
+ "step": 412
+ },
+ {
+ "epoch": 0.41311618892813606,
+ "grad_norm": 0.6807469129562378,
+ "learning_rate": 0.0001907742300230406,
+ "loss": 1.5997,
+ "step": 413
+ },
+ {
+ "epoch": 0.41411647025725984,
+ "grad_norm": 0.8060654401779175,
+ "learning_rate": 0.00019073020329090444,
+ "loss": 1.8099,
+ "step": 414
+ },
+ {
+ "epoch": 0.41511675158638367,
+ "grad_norm": 0.7440110445022583,
+ "learning_rate": 0.0001906860768626834,
+ "loss": 1.4876,
+ "step": 415
+ },
+ {
+ "epoch": 0.4161170329155075,
+ "grad_norm": 0.7675415277481079,
+ "learning_rate": 0.00019064185078686443,
+ "loss": 1.4722,
+ "step": 416
+ },
+ {
+ "epoch": 0.4171173142446313,
+ "grad_norm": 0.6656553149223328,
+ "learning_rate": 0.000190597525112044,
+ "loss": 1.6453,
+ "step": 417
+ },
+ {
+ "epoch": 0.4181175955737551,
+ "grad_norm": 0.730689287185669,
+ "learning_rate": 0.000190553099886928,
+ "loss": 1.6584,
+ "step": 418
+ },
+ {
+ "epoch": 0.41911787690287894,
+ "grad_norm": 0.8425858616828918,
+ "learning_rate": 0.00019050857516033173,
+ "loss": 1.6249,
+ "step": 419
+ },
+ {
+ "epoch": 0.42011815823200277,
+ "grad_norm": 0.7816892266273499,
+ "learning_rate": 0.00019046395098117983,
+ "loss": 1.7532,
+ "step": 420
+ },
+ {
+ "epoch": 0.42111843956112655,
+ "grad_norm": 0.7324026823043823,
+ "learning_rate": 0.00019041922739850616,
+ "loss": 1.8523,
+ "step": 421
+ },
+ {
+ "epoch": 0.4221187208902504,
+ "grad_norm": 0.7473389506340027,
+ "learning_rate": 0.00019037440446145385,
+ "loss": 1.583,
+ "step": 422
+ },
+ {
+ "epoch": 0.4231190022193742,
+ "grad_norm": 0.8720895051956177,
+ "learning_rate": 0.00019032948221927524,
+ "loss": 1.6806,
+ "step": 423
+ },
+ {
+ "epoch": 0.42411928354849804,
+ "grad_norm": 0.728528618812561,
+ "learning_rate": 0.00019028446072133175,
+ "loss": 1.7283,
+ "step": 424
+ },
+ {
+ "epoch": 0.4251195648776218,
+ "grad_norm": 0.739930272102356,
+ "learning_rate": 0.00019023934001709383,
+ "loss": 1.7244,
+ "step": 425
+ },
+ {
+ "epoch": 0.42611984620674564,
+ "grad_norm": 0.7825399041175842,
+ "learning_rate": 0.00019019412015614098,
+ "loss": 1.7871,
+ "step": 426
+ },
+ {
+ "epoch": 0.4271201275358695,
+ "grad_norm": 0.8878734707832336,
+ "learning_rate": 0.00019014880118816164,
+ "loss": 1.6018,
+ "step": 427
+ },
+ {
+ "epoch": 0.4281204088649933,
+ "grad_norm": 0.726259708404541,
+ "learning_rate": 0.0001901033831629532,
+ "loss": 1.7732,
+ "step": 428
+ },
+ {
+ "epoch": 0.4291206901941171,
+ "grad_norm": 0.7620319724082947,
+ "learning_rate": 0.00019005786613042185,
+ "loss": 1.6466,
+ "step": 429
+ },
+ {
+ "epoch": 0.4301209715232409,
+ "grad_norm": 0.7295501828193665,
+ "learning_rate": 0.00019001225014058255,
+ "loss": 1.8708,
+ "step": 430
+ },
+ {
+ "epoch": 0.43112125285236474,
+ "grad_norm": 0.7419458031654358,
+ "learning_rate": 0.00018996653524355902,
+ "loss": 1.6583,
+ "step": 431
+ },
+ {
+ "epoch": 0.4321215341814885,
+ "grad_norm": 0.7701705098152161,
+ "learning_rate": 0.00018992072148958368,
+ "loss": 1.4421,
+ "step": 432
+ },
+ {
+ "epoch": 0.43312181551061235,
+ "grad_norm": 0.8237659931182861,
+ "learning_rate": 0.00018987480892899758,
+ "loss": 1.844,
+ "step": 433
+ },
+ {
+ "epoch": 0.4341220968397362,
+ "grad_norm": 0.6167672276496887,
+ "learning_rate": 0.00018982879761225027,
+ "loss": 1.6193,
+ "step": 434
+ },
+ {
+ "epoch": 0.43512237816886,
+ "grad_norm": 0.7565534710884094,
+ "learning_rate": 0.00018978268758989991,
+ "loss": 1.7655,
+ "step": 435
+ },
+ {
+ "epoch": 0.4361226594979838,
+ "grad_norm": 0.8333333730697632,
+ "learning_rate": 0.00018973647891261307,
+ "loss": 1.5764,
+ "step": 436
+ },
+ {
+ "epoch": 0.4371229408271076,
+ "grad_norm": 0.7404434084892273,
+ "learning_rate": 0.00018969017163116472,
+ "loss": 1.7922,
+ "step": 437
+ },
+ {
+ "epoch": 0.43812322215623145,
+ "grad_norm": 0.7129400372505188,
+ "learning_rate": 0.0001896437657964382,
+ "loss": 1.6925,
+ "step": 438
+ },
+ {
+ "epoch": 0.4391235034853553,
+ "grad_norm": 0.7750307321548462,
+ "learning_rate": 0.00018959726145942508,
+ "loss": 1.8133,
+ "step": 439
+ },
+ {
+ "epoch": 0.44012378481447906,
+ "grad_norm": 0.7244920134544373,
+ "learning_rate": 0.00018955065867122528,
+ "loss": 1.6425,
+ "step": 440
+ },
+ {
+ "epoch": 0.4411240661436029,
+ "grad_norm": 0.7624644637107849,
+ "learning_rate": 0.00018950395748304678,
+ "loss": 1.886,
+ "step": 441
+ },
+ {
+ "epoch": 0.4421243474727267,
+ "grad_norm": 0.7016286849975586,
+ "learning_rate": 0.0001894571579462058,
+ "loss": 1.7308,
+ "step": 442
+ },
+ {
+ "epoch": 0.4431246288018505,
+ "grad_norm": 0.6965353488922119,
+ "learning_rate": 0.00018941026011212654,
+ "loss": 1.5872,
+ "step": 443
+ },
+ {
+ "epoch": 0.4441249101309743,
+ "grad_norm": 0.7479360699653625,
+ "learning_rate": 0.00018936326403234125,
+ "loss": 1.8129,
+ "step": 444
+ },
+ {
+ "epoch": 0.44512519146009816,
+ "grad_norm": 0.7027686834335327,
+ "learning_rate": 0.00018931616975849006,
+ "loss": 1.6433,
+ "step": 445
+ },
+ {
+ "epoch": 0.446125472789222,
+ "grad_norm": 0.7771592140197754,
+ "learning_rate": 0.00018926897734232115,
+ "loss": 1.4645,
+ "step": 446
+ },
+ {
+ "epoch": 0.44712575411834576,
+ "grad_norm": 0.7766458988189697,
+ "learning_rate": 0.0001892216868356904,
+ "loss": 1.7873,
+ "step": 447
+ },
+ {
+ "epoch": 0.4481260354474696,
+ "grad_norm": 0.8146182298660278,
+ "learning_rate": 0.0001891742982905615,
+ "loss": 1.7935,
+ "step": 448
+ },
+ {
+ "epoch": 0.4491263167765934,
+ "grad_norm": 0.6744781136512756,
+ "learning_rate": 0.00018912681175900598,
+ "loss": 1.8916,
+ "step": 449
+ },
+ {
+ "epoch": 0.45012659810571726,
+ "grad_norm": 0.6259024143218994,
+ "learning_rate": 0.00018907922729320285,
+ "loss": 1.6616,
+ "step": 450
+ },
+ {
+ "epoch": 0.45112687943484103,
+ "grad_norm": 0.7717494368553162,
+ "learning_rate": 0.00018903154494543889,
+ "loss": 1.817,
+ "step": 451
+ },
+ {
+ "epoch": 0.45212716076396486,
+ "grad_norm": 0.648040771484375,
+ "learning_rate": 0.00018898376476810834,
+ "loss": 1.6309,
+ "step": 452
+ },
+ {
+ "epoch": 0.4531274420930887,
+ "grad_norm": 0.7560017704963684,
+ "learning_rate": 0.00018893588681371303,
+ "loss": 1.8016,
+ "step": 453
+ },
+ {
+ "epoch": 0.45412772342221247,
+ "grad_norm": 0.8778628706932068,
+ "learning_rate": 0.00018888791113486213,
+ "loss": 1.7797,
+ "step": 454
+ },
+ {
+ "epoch": 0.4551280047513363,
+ "grad_norm": 0.6795655488967896,
+ "learning_rate": 0.00018883983778427227,
+ "loss": 1.6343,
+ "step": 455
+ },
+ {
+ "epoch": 0.45612828608046013,
+ "grad_norm": 0.6690213084220886,
+ "learning_rate": 0.0001887916668147673,
+ "loss": 1.6224,
+ "step": 456
+ },
+ {
+ "epoch": 0.45712856740958396,
+ "grad_norm": 0.7529327869415283,
+ "learning_rate": 0.00018874339827927846,
+ "loss": 1.6396,
+ "step": 457
+ },
+ {
+ "epoch": 0.45812884873870774,
+ "grad_norm": 0.8393098711967468,
+ "learning_rate": 0.00018869503223084414,
+ "loss": 1.8374,
+ "step": 458
+ },
+ {
+ "epoch": 0.45912913006783157,
+ "grad_norm": 0.7435776591300964,
+ "learning_rate": 0.00018864656872260985,
+ "loss": 1.7363,
+ "step": 459
+ },
+ {
+ "epoch": 0.4601294113969554,
+ "grad_norm": 0.6737055778503418,
+ "learning_rate": 0.00018859800780782828,
+ "loss": 1.6661,
+ "step": 460
+ },
+ {
+ "epoch": 0.46112969272607923,
+ "grad_norm": 0.7674340605735779,
+ "learning_rate": 0.000188549349539859,
+ "loss": 1.6269,
+ "step": 461
+ },
+ {
+ "epoch": 0.462129974055203,
+ "grad_norm": 0.7329950928688049,
+ "learning_rate": 0.00018850059397216876,
+ "loss": 1.6989,
+ "step": 462
+ },
+ {
+ "epoch": 0.46313025538432684,
+ "grad_norm": 0.7075778841972351,
+ "learning_rate": 0.00018845174115833099,
+ "loss": 1.7286,
+ "step": 463
+ },
+ {
+ "epoch": 0.46413053671345067,
+ "grad_norm": 0.7973611354827881,
+ "learning_rate": 0.0001884027911520262,
+ "loss": 1.7478,
+ "step": 464
+ },
+ {
+ "epoch": 0.4651308180425745,
+ "grad_norm": 0.7790057063102722,
+ "learning_rate": 0.00018835374400704154,
+ "loss": 1.6659,
+ "step": 465
+ },
+ {
+ "epoch": 0.4661310993716983,
+ "grad_norm": 0.8505310416221619,
+ "learning_rate": 0.00018830459977727096,
+ "loss": 1.6879,
+ "step": 466
+ },
+ {
+ "epoch": 0.4671313807008221,
+ "grad_norm": 0.7616267800331116,
+ "learning_rate": 0.0001882553585167151,
+ "loss": 1.6525,
+ "step": 467
+ },
+ {
+ "epoch": 0.46813166202994594,
+ "grad_norm": 0.8038133978843689,
+ "learning_rate": 0.00018820602027948114,
+ "loss": 1.7929,
+ "step": 468
+ },
+ {
+ "epoch": 0.4691319433590697,
+ "grad_norm": 0.6762365698814392,
+ "learning_rate": 0.00018815658511978298,
+ "loss": 1.6926,
+ "step": 469
+ },
+ {
+ "epoch": 0.47013222468819355,
+ "grad_norm": 0.6515015959739685,
+ "learning_rate": 0.00018810705309194083,
+ "loss": 1.7817,
+ "step": 470
+ },
+ {
+ "epoch": 0.4711325060173174,
+ "grad_norm": 0.696675181388855,
+ "learning_rate": 0.00018805742425038145,
+ "loss": 1.7195,
+ "step": 471
+ },
+ {
+ "epoch": 0.4721327873464412,
+ "grad_norm": 0.7929533123970032,
+ "learning_rate": 0.00018800769864963802,
+ "loss": 2.0165,
+ "step": 472
+ },
+ {
+ "epoch": 0.473133068675565,
+ "grad_norm": 0.7223886251449585,
+ "learning_rate": 0.00018795787634434994,
+ "loss": 1.6708,
+ "step": 473
+ },
+ {
+ "epoch": 0.4741333500046888,
+ "grad_norm": 0.7982028126716614,
+ "learning_rate": 0.0001879079573892629,
+ "loss": 1.628,
+ "step": 474
+ },
+ {
+ "epoch": 0.47513363133381264,
+ "grad_norm": 0.6962152123451233,
+ "learning_rate": 0.00018785794183922883,
+ "loss": 1.6429,
+ "step": 475
+ },
+ {
+ "epoch": 0.4761339126629365,
+ "grad_norm": 0.687489926815033,
+ "learning_rate": 0.00018780782974920572,
+ "loss": 1.4546,
+ "step": 476
+ },
+ {
+ "epoch": 0.47713419399206025,
+ "grad_norm": 0.7260375022888184,
+ "learning_rate": 0.00018775762117425777,
+ "loss": 1.739,
+ "step": 477
+ },
+ {
+ "epoch": 0.4781344753211841,
+ "grad_norm": 0.759400486946106,
+ "learning_rate": 0.0001877073161695551,
+ "loss": 1.6465,
+ "step": 478
+ },
+ {
+ "epoch": 0.4791347566503079,
+ "grad_norm": 0.7412364482879639,
+ "learning_rate": 0.00018765691479037376,
+ "loss": 1.7333,
+ "step": 479
+ },
+ {
+ "epoch": 0.4801350379794317,
+ "grad_norm": 0.6909674406051636,
+ "learning_rate": 0.00018760641709209583,
+ "loss": 1.6936,
+ "step": 480
+ },
+ {
+ "epoch": 0.4811353193085555,
+ "grad_norm": 0.6478050947189331,
+ "learning_rate": 0.0001875558231302091,
+ "loss": 1.4435,
+ "step": 481
+ },
+ {
+ "epoch": 0.48213560063767935,
+ "grad_norm": 0.6662638187408447,
+ "learning_rate": 0.00018750513296030718,
+ "loss": 1.5567,
+ "step": 482
+ },
+ {
+ "epoch": 0.4831358819668032,
+ "grad_norm": 0.6973413825035095,
+ "learning_rate": 0.00018745434663808942,
+ "loss": 1.6434,
+ "step": 483
+ },
+ {
+ "epoch": 0.48413616329592696,
+ "grad_norm": 0.8381956815719604,
+ "learning_rate": 0.0001874034642193608,
+ "loss": 1.8568,
+ "step": 484
+ },
+ {
+ "epoch": 0.4851364446250508,
+ "grad_norm": 0.6522262096405029,
+ "learning_rate": 0.0001873524857600319,
+ "loss": 1.4265,
+ "step": 485
+ },
+ {
+ "epoch": 0.4861367259541746,
+ "grad_norm": 0.7018440961837769,
+ "learning_rate": 0.00018730141131611882,
+ "loss": 1.6914,
+ "step": 486
+ },
+ {
+ "epoch": 0.48713700728329845,
+ "grad_norm": 0.8237236142158508,
+ "learning_rate": 0.00018725024094374315,
+ "loss": 1.4462,
+ "step": 487
+ },
+ {
+ "epoch": 0.4881372886124222,
+ "grad_norm": 0.6507928967475891,
+ "learning_rate": 0.00018719897469913184,
+ "loss": 1.5802,
+ "step": 488
+ },
+ {
+ "epoch": 0.48913756994154606,
+ "grad_norm": 0.8120100498199463,
+ "learning_rate": 0.00018714761263861728,
+ "loss": 1.7819,
+ "step": 489
+ },
+ {
+ "epoch": 0.4901378512706699,
+ "grad_norm": 0.759722888469696,
+ "learning_rate": 0.000187096154818637,
+ "loss": 1.7481,
+ "step": 490
+ },
+ {
+ "epoch": 0.4911381325997937,
+ "grad_norm": 0.7146822214126587,
+ "learning_rate": 0.00018704460129573391,
+ "loss": 1.6217,
+ "step": 491
+ },
+ {
+ "epoch": 0.4921384139289175,
+ "grad_norm": 0.7138429284095764,
+ "learning_rate": 0.00018699295212655596,
+ "loss": 1.7242,
+ "step": 492
+ },
+ {
+ "epoch": 0.4931386952580413,
+ "grad_norm": 0.8145613670349121,
+ "learning_rate": 0.00018694120736785632,
+ "loss": 1.8759,
+ "step": 493
+ },
+ {
+ "epoch": 0.49413897658716516,
+ "grad_norm": 0.6624435186386108,
+ "learning_rate": 0.00018688936707649304,
+ "loss": 1.8632,
+ "step": 494
+ },
+ {
+ "epoch": 0.49513925791628893,
+ "grad_norm": 0.6550843119621277,
+ "learning_rate": 0.00018683743130942928,
+ "loss": 1.7645,
+ "step": 495
+ },
+ {
+ "epoch": 0.49613953924541276,
+ "grad_norm": 0.6931488513946533,
+ "learning_rate": 0.00018678540012373302,
+ "loss": 1.5899,
+ "step": 496
+ },
+ {
+ "epoch": 0.4971398205745366,
+ "grad_norm": 0.684229850769043,
+ "learning_rate": 0.00018673327357657715,
+ "loss": 1.6902,
+ "step": 497
+ },
+ {
+ "epoch": 0.4981401019036604,
+ "grad_norm": 0.7380666136741638,
+ "learning_rate": 0.0001866810517252393,
+ "loss": 1.7115,
+ "step": 498
+ },
+ {
+ "epoch": 0.4991403832327842,
+ "grad_norm": 0.6782827973365784,
+ "learning_rate": 0.00018662873462710184,
+ "loss": 1.495,
+ "step": 499
+ },
+ {
+ "epoch": 0.5001406645619081,
+ "grad_norm": 0.730248749256134,
+ "learning_rate": 0.0001865763223396518,
+ "loss": 1.5147,
+ "step": 500
+ },
+ {
+ "epoch": 0.5011409458910319,
+ "grad_norm": 0.7644149661064148,
+ "learning_rate": 0.00018652381492048083,
+ "loss": 1.7278,
+ "step": 501
+ },
+ {
+ "epoch": 0.5021412272201556,
+ "grad_norm": 0.6977668404579163,
+ "learning_rate": 0.00018647121242728506,
+ "loss": 1.5194,
+ "step": 502
+ },
+ {
+ "epoch": 0.5031415085492795,
+ "grad_norm": 0.7714502215385437,
+ "learning_rate": 0.00018641851491786512,
+ "loss": 2.0039,
+ "step": 503
+ },
+ {
+ "epoch": 0.5041417898784033,
+ "grad_norm": 0.9013757109642029,
+ "learning_rate": 0.00018636572245012606,
+ "loss": 1.8069,
+ "step": 504
+ },
+ {
+ "epoch": 0.5051420712075271,
+ "grad_norm": 0.7173192501068115,
+ "learning_rate": 0.00018631283508207725,
+ "loss": 1.841,
+ "step": 505
+ },
+ {
+ "epoch": 0.506142352536651,
+ "grad_norm": 0.7639481425285339,
+ "learning_rate": 0.00018625985287183233,
+ "loss": 1.5333,
+ "step": 506
+ },
+ {
+ "epoch": 0.5071426338657747,
+ "grad_norm": 0.8779808282852173,
+ "learning_rate": 0.00018620677587760916,
+ "loss": 1.8304,
+ "step": 507
+ },
+ {
+ "epoch": 0.5081429151948985,
+ "grad_norm": 0.8956230282783508,
+ "learning_rate": 0.00018615360415772978,
+ "loss": 1.7228,
+ "step": 508
+ },
+ {
+ "epoch": 0.5091431965240224,
+ "grad_norm": 0.8217945098876953,
+ "learning_rate": 0.00018610033777062025,
+ "loss": 1.4977,
+ "step": 509
+ },
+ {
+ "epoch": 0.5101434778531462,
+ "grad_norm": 0.7628902196884155,
+ "learning_rate": 0.0001860469767748108,
+ "loss": 1.7634,
+ "step": 510
+ },
+ {
+ "epoch": 0.5111437591822701,
+ "grad_norm": 0.635911226272583,
+ "learning_rate": 0.00018599352122893539,
+ "loss": 1.5103,
+ "step": 511
+ },
+ {
+ "epoch": 0.5121440405113938,
+ "grad_norm": 0.6439646482467651,
+ "learning_rate": 0.00018593997119173205,
+ "loss": 1.7281,
+ "step": 512
+ },
+ {
+ "epoch": 0.5131443218405176,
+ "grad_norm": 0.7753567099571228,
+ "learning_rate": 0.00018588632672204264,
+ "loss": 1.9028,
+ "step": 513
+ },
+ {
+ "epoch": 0.5141446031696415,
+ "grad_norm": 0.8296717405319214,
+ "learning_rate": 0.0001858325878788126,
+ "loss": 1.9049,
+ "step": 514
+ },
+ {
+ "epoch": 0.5151448844987653,
+ "grad_norm": 0.7379167079925537,
+ "learning_rate": 0.00018577875472109134,
+ "loss": 1.6262,
+ "step": 515
+ },
+ {
+ "epoch": 0.516145165827889,
+ "grad_norm": 0.634040355682373,
+ "learning_rate": 0.0001857248273080317,
+ "loss": 1.5416,
+ "step": 516
+ },
+ {
+ "epoch": 0.5171454471570129,
+ "grad_norm": 0.7394217252731323,
+ "learning_rate": 0.00018567080569889015,
+ "loss": 1.6035,
+ "step": 517
+ },
+ {
+ "epoch": 0.5181457284861367,
+ "grad_norm": 0.705426037311554,
+ "learning_rate": 0.00018561668995302667,
+ "loss": 1.616,
+ "step": 518
+ },
+ {
+ "epoch": 0.5191460098152605,
+ "grad_norm": 0.778021514415741,
+ "learning_rate": 0.00018556248012990468,
+ "loss": 1.5206,
+ "step": 519
+ },
+ {
+ "epoch": 0.5201462911443844,
+ "grad_norm": 0.7203211188316345,
+ "learning_rate": 0.000185508176289091,
+ "loss": 1.5369,
+ "step": 520
+ },
+ {
+ "epoch": 0.5211465724735082,
+ "grad_norm": 0.7390999794006348,
+ "learning_rate": 0.00018545377849025566,
+ "loss": 1.6438,
+ "step": 521
+ },
+ {
+ "epoch": 0.522146853802632,
+ "grad_norm": 0.6779179573059082,
+ "learning_rate": 0.0001853992867931721,
+ "loss": 1.6268,
+ "step": 522
+ },
+ {
+ "epoch": 0.5231471351317558,
+ "grad_norm": 0.6589105725288391,
+ "learning_rate": 0.00018534470125771674,
+ "loss": 1.8353,
+ "step": 523
+ },
+ {
+ "epoch": 0.5241474164608796,
+ "grad_norm": 0.692081868648529,
+ "learning_rate": 0.0001852900219438693,
+ "loss": 1.7047,
+ "step": 524
+ },
+ {
+ "epoch": 0.5251476977900035,
+ "grad_norm": 0.6639224886894226,
+ "learning_rate": 0.0001852352489117124,
+ "loss": 1.7448,
+ "step": 525
+ },
+ {
+ "epoch": 0.5261479791191273,
+ "grad_norm": 0.7168188095092773,
+ "learning_rate": 0.00018518038222143174,
+ "loss": 1.6734,
+ "step": 526
+ },
+ {
+ "epoch": 0.527148260448251,
+ "grad_norm": 0.7671873569488525,
+ "learning_rate": 0.00018512542193331583,
+ "loss": 1.9392,
+ "step": 527
+ },
+ {
+ "epoch": 0.5281485417773749,
+ "grad_norm": 0.7861583828926086,
+ "learning_rate": 0.00018507036810775615,
+ "loss": 1.5749,
+ "step": 528
+ },
+ {
+ "epoch": 0.5291488231064987,
+ "grad_norm": 0.6727952361106873,
+ "learning_rate": 0.00018501522080524688,
+ "loss": 1.7584,
+ "step": 529
+ },
+ {
+ "epoch": 0.5301491044356225,
+ "grad_norm": 0.7287748456001282,
+ "learning_rate": 0.0001849599800863849,
+ "loss": 1.783,
+ "step": 530
+ },
+ {
+ "epoch": 0.5311493857647464,
+ "grad_norm": 0.6883361339569092,
+ "learning_rate": 0.0001849046460118698,
+ "loss": 1.6104,
+ "step": 531
+ },
+ {
+ "epoch": 0.5321496670938701,
+ "grad_norm": 0.6767789125442505,
+ "learning_rate": 0.0001848492186425037,
+ "loss": 1.8416,
+ "step": 532
+ },
+ {
+ "epoch": 0.533149948422994,
+ "grad_norm": 0.7468088269233704,
+ "learning_rate": 0.0001847936980391913,
+ "loss": 1.8297,
+ "step": 533
+ },
+ {
+ "epoch": 0.5341502297521178,
+ "grad_norm": 0.7560007572174072,
+ "learning_rate": 0.00018473808426293964,
+ "loss": 1.5405,
+ "step": 534
+ },
+ {
+ "epoch": 0.5351505110812416,
+ "grad_norm": 0.6900463104248047,
+ "learning_rate": 0.00018468237737485823,
+ "loss": 1.5117,
+ "step": 535
+ },
+ {
+ "epoch": 0.5361507924103655,
+ "grad_norm": 0.8691229820251465,
+ "learning_rate": 0.00018462657743615888,
+ "loss": 1.724,
+ "step": 536
+ },
+ {
+ "epoch": 0.5371510737394892,
+ "grad_norm": 0.7081372141838074,
+ "learning_rate": 0.00018457068450815562,
+ "loss": 1.6418,
+ "step": 537
+ },
+ {
+ "epoch": 0.538151355068613,
+ "grad_norm": 0.8148525953292847,
+ "learning_rate": 0.00018451469865226464,
+ "loss": 1.8564,
+ "step": 538
+ },
+ {
+ "epoch": 0.5391516363977369,
+ "grad_norm": 0.7306076288223267,
+ "learning_rate": 0.00018445861993000436,
+ "loss": 1.4974,
+ "step": 539
+ },
+ {
+ "epoch": 0.5401519177268607,
+ "grad_norm": 0.815825343132019,
+ "learning_rate": 0.00018440244840299506,
+ "loss": 1.8965,
+ "step": 540
+ },
+ {
+ "epoch": 0.5411521990559846,
+ "grad_norm": 0.753034234046936,
+ "learning_rate": 0.0001843461841329591,
+ "loss": 2.016,
+ "step": 541
+ },
+ {
+ "epoch": 0.5421524803851083,
+ "grad_norm": 0.8658022284507751,
+ "learning_rate": 0.0001842898271817208,
+ "loss": 1.6697,
+ "step": 542
+ },
+ {
+ "epoch": 0.5431527617142321,
+ "grad_norm": 0.7143135666847229,
+ "learning_rate": 0.00018423337761120618,
+ "loss": 1.8741,
+ "step": 543
+ },
+ {
+ "epoch": 0.544153043043356,
+ "grad_norm": 0.6843370795249939,
+ "learning_rate": 0.00018417683548344318,
+ "loss": 1.763,
+ "step": 544
+ },
+ {
+ "epoch": 0.5451533243724798,
+ "grad_norm": 0.6699584126472473,
+ "learning_rate": 0.00018412020086056133,
+ "loss": 1.7126,
+ "step": 545
+ },
+ {
+ "epoch": 0.5461536057016035,
+ "grad_norm": 0.6921600699424744,
+ "learning_rate": 0.0001840634738047918,
+ "loss": 1.6697,
+ "step": 546
+ },
+ {
+ "epoch": 0.5471538870307274,
+ "grad_norm": 0.822501003742218,
+ "learning_rate": 0.0001840066543784675,
+ "loss": 1.7045,
+ "step": 547
+ },
+ {
+ "epoch": 0.5481541683598512,
+ "grad_norm": 0.7563886046409607,
+ "learning_rate": 0.00018394974264402257,
+ "loss": 1.6853,
+ "step": 548
+ },
+ {
+ "epoch": 0.549154449688975,
+ "grad_norm": 0.7408218383789062,
+ "learning_rate": 0.00018389273866399275,
+ "loss": 1.6496,
+ "step": 549
+ },
+ {
+ "epoch": 0.5501547310180989,
+ "grad_norm": 0.6454717516899109,
+ "learning_rate": 0.00018383564250101512,
+ "loss": 1.5063,
+ "step": 550
+ },
+ {
+ "epoch": 0.5511550123472226,
+ "grad_norm": 0.7033074498176575,
+ "learning_rate": 0.000183778454217828,
+ "loss": 1.6432,
+ "step": 551
+ },
+ {
+ "epoch": 0.5521552936763465,
+ "grad_norm": 0.768194854259491,
+ "learning_rate": 0.0001837211738772711,
+ "loss": 2.0594,
+ "step": 552
+ },
+ {
+ "epoch": 0.5531555750054703,
+ "grad_norm": 0.7805166244506836,
+ "learning_rate": 0.000183663801542285,
+ "loss": 1.4317,
+ "step": 553
+ },
+ {
+ "epoch": 0.5541558563345941,
+ "grad_norm": 0.6603556871414185,
+ "learning_rate": 0.00018360633727591155,
+ "loss": 1.4171,
+ "step": 554
+ },
+ {
+ "epoch": 0.555156137663718,
+ "grad_norm": 0.6996607780456543,
+ "learning_rate": 0.00018354878114129367,
+ "loss": 1.6832,
+ "step": 555
+ },
+ {
+ "epoch": 0.5561564189928417,
+ "grad_norm": 0.7861623167991638,
+ "learning_rate": 0.00018349113320167504,
+ "loss": 1.8425,
+ "step": 556
+ },
+ {
+ "epoch": 0.5571567003219655,
+ "grad_norm": 0.8387210369110107,
+ "learning_rate": 0.00018343339352040042,
+ "loss": 2.1272,
+ "step": 557
+ },
+ {
+ "epoch": 0.5581569816510894,
+ "grad_norm": 0.829555094242096,
+ "learning_rate": 0.00018337556216091517,
+ "loss": 1.4835,
+ "step": 558
+ },
+ {
+ "epoch": 0.5591572629802132,
+ "grad_norm": 0.7824863791465759,
+ "learning_rate": 0.00018331763918676556,
+ "loss": 1.8893,
+ "step": 559
+ },
+ {
+ "epoch": 0.560157544309337,
+ "grad_norm": 0.707683801651001,
+ "learning_rate": 0.00018325962466159848,
+ "loss": 1.6492,
+ "step": 560
+ },
+ {
+ "epoch": 0.5611578256384608,
+ "grad_norm": 0.775600254535675,
+ "learning_rate": 0.00018320151864916135,
+ "loss": 1.6542,
+ "step": 561
+ },
+ {
+ "epoch": 0.5621581069675846,
+ "grad_norm": 0.7602002024650574,
+ "learning_rate": 0.00018314332121330225,
+ "loss": 1.9625,
+ "step": 562
+ },
+ {
+ "epoch": 0.5631583882967085,
+ "grad_norm": 0.7535431385040283,
+ "learning_rate": 0.0001830850324179695,
+ "loss": 1.6407,
+ "step": 563
+ },
+ {
+ "epoch": 0.5641586696258323,
+ "grad_norm": 1.1884644031524658,
+ "learning_rate": 0.00018302665232721208,
+ "loss": 1.6188,
+ "step": 564
+ },
+ {
+ "epoch": 0.565158950954956,
+ "grad_norm": 0.7576595544815063,
+ "learning_rate": 0.0001829681810051791,
+ "loss": 1.7739,
+ "step": 565
+ },
+ {
+ "epoch": 0.5661592322840799,
+ "grad_norm": 0.6807442307472229,
+ "learning_rate": 0.00018290961851611995,
+ "loss": 1.6244,
+ "step": 566
+ },
+ {
+ "epoch": 0.5671595136132037,
+ "grad_norm": 0.7222456932067871,
+ "learning_rate": 0.00018285096492438424,
+ "loss": 1.7809,
+ "step": 567
+ },
+ {
+ "epoch": 0.5681597949423275,
+ "grad_norm": 0.7002213597297668,
+ "learning_rate": 0.00018279222029442163,
+ "loss": 1.5462,
+ "step": 568
+ },
+ {
+ "epoch": 0.5691600762714514,
+ "grad_norm": 0.8433569669723511,
+ "learning_rate": 0.00018273338469078186,
+ "loss": 1.5042,
+ "step": 569
+ },
+ {
+ "epoch": 0.5701603576005752,
+ "grad_norm": 0.663144588470459,
+ "learning_rate": 0.00018267445817811466,
+ "loss": 1.7133,
+ "step": 570
+ },
+ {
+ "epoch": 0.5711606389296989,
+ "grad_norm": 0.7298465967178345,
+ "learning_rate": 0.00018261544082116954,
+ "loss": 2.0201,
+ "step": 571
+ },
+ {
+ "epoch": 0.5721609202588228,
+ "grad_norm": 0.7613754868507385,
+ "learning_rate": 0.00018255633268479595,
+ "loss": 1.8065,
+ "step": 572
+ },
+ {
+ "epoch": 0.5731612015879466,
+ "grad_norm": 0.7252177596092224,
+ "learning_rate": 0.00018249713383394303,
+ "loss": 1.5715,
+ "step": 573
+ },
+ {
+ "epoch": 0.5741614829170705,
+ "grad_norm": 0.783961832523346,
+ "learning_rate": 0.0001824378443336596,
+ "loss": 1.7102,
+ "step": 574
+ },
+ {
+ "epoch": 0.5751617642461943,
+ "grad_norm": 0.8532115817070007,
+ "learning_rate": 0.00018237846424909413,
+ "loss": 1.7011,
+ "step": 575
+ },
+ {
+ "epoch": 0.576162045575318,
+ "grad_norm": 0.7841559052467346,
+ "learning_rate": 0.00018231899364549455,
+ "loss": 1.6397,
+ "step": 576
+ },
+ {
+ "epoch": 0.5771623269044419,
+ "grad_norm": 0.7118051648139954,
+ "learning_rate": 0.00018225943258820833,
+ "loss": 1.7166,
+ "step": 577
+ },
+ {
+ "epoch": 0.5781626082335657,
+ "grad_norm": 0.7298933863639832,
+ "learning_rate": 0.00018219978114268227,
+ "loss": 1.604,
+ "step": 578
+ },
+ {
+ "epoch": 0.5791628895626895,
+ "grad_norm": 0.6644678711891174,
+ "learning_rate": 0.00018214003937446253,
+ "loss": 1.7673,
+ "step": 579
+ },
+ {
+ "epoch": 0.5801631708918134,
+ "grad_norm": 0.6707085371017456,
+ "learning_rate": 0.00018208020734919455,
+ "loss": 1.662,
+ "step": 580
+ },
+ {
+ "epoch": 0.5811634522209371,
+ "grad_norm": 0.7431498765945435,
+ "learning_rate": 0.00018202028513262288,
+ "loss": 1.7757,
+ "step": 581
+ },
+ {
+ "epoch": 0.5821637335500609,
+ "grad_norm": 0.6936060190200806,
+ "learning_rate": 0.00018196027279059117,
+ "loss": 1.8464,
+ "step": 582
+ },
+ {
+ "epoch": 0.5831640148791848,
+ "grad_norm": 0.6512508988380432,
+ "learning_rate": 0.00018190017038904215,
+ "loss": 1.5537,
+ "step": 583
+ },
+ {
+ "epoch": 0.5841642962083086,
+ "grad_norm": 0.7541425228118896,
+ "learning_rate": 0.0001818399779940175,
+ "loss": 1.8552,
+ "step": 584
+ },
+ {
+ "epoch": 0.5851645775374325,
+ "grad_norm": 0.720447301864624,
+ "learning_rate": 0.0001817796956716578,
+ "loss": 1.5392,
+ "step": 585
+ },
+ {
+ "epoch": 0.5861648588665562,
+ "grad_norm": 0.7526831030845642,
+ "learning_rate": 0.00018171932348820234,
+ "loss": 1.8224,
+ "step": 586
+ },
+ {
+ "epoch": 0.58716514019568,
+ "grad_norm": 0.6906039714813232,
+ "learning_rate": 0.0001816588615099893,
+ "loss": 1.6498,
+ "step": 587
+ },
+ {
+ "epoch": 0.5881654215248039,
+ "grad_norm": 0.726737380027771,
+ "learning_rate": 0.00018159830980345548,
+ "loss": 1.5377,
+ "step": 588
+ },
+ {
+ "epoch": 0.5891657028539277,
+ "grad_norm": 0.6792006492614746,
+ "learning_rate": 0.0001815376684351362,
+ "loss": 1.8905,
+ "step": 589
+ },
+ {
+ "epoch": 0.5901659841830514,
+ "grad_norm": 0.7885284423828125,
+ "learning_rate": 0.00018147693747166534,
+ "loss": 1.709,
+ "step": 590
+ },
+ {
+ "epoch": 0.5911662655121753,
+ "grad_norm": 0.7270367741584778,
+ "learning_rate": 0.00018141611697977529,
+ "loss": 1.7973,
+ "step": 591
+ },
+ {
+ "epoch": 0.5921665468412991,
+ "grad_norm": 0.7852438688278198,
+ "learning_rate": 0.00018135520702629675,
+ "loss": 1.6312,
+ "step": 592
+ },
+ {
+ "epoch": 0.5931668281704229,
+ "grad_norm": 0.7881343960762024,
+ "learning_rate": 0.0001812942076781588,
+ "loss": 1.7581,
+ "step": 593
+ },
+ {
+ "epoch": 0.5941671094995468,
+ "grad_norm": 0.7581279277801514,
+ "learning_rate": 0.0001812331190023886,
+ "loss": 1.6811,
+ "step": 594
+ },
+ {
+ "epoch": 0.5951673908286705,
+ "grad_norm": 0.7250573039054871,
+ "learning_rate": 0.0001811719410661116,
+ "loss": 1.6835,
+ "step": 595
+ },
+ {
+ "epoch": 0.5961676721577944,
+ "grad_norm": 0.7383652329444885,
+ "learning_rate": 0.00018111067393655132,
+ "loss": 1.7804,
+ "step": 596
+ },
+ {
+ "epoch": 0.5971679534869182,
+ "grad_norm": 0.6631885170936584,
+ "learning_rate": 0.0001810493176810292,
+ "loss": 1.477,
+ "step": 597
+ },
+ {
+ "epoch": 0.598168234816042,
+ "grad_norm": 0.6705698370933533,
+ "learning_rate": 0.00018098787236696474,
+ "loss": 1.5939,
+ "step": 598
+ },
+ {
+ "epoch": 0.5991685161451659,
+ "grad_norm": 0.6646862626075745,
+ "learning_rate": 0.00018092633806187513,
+ "loss": 1.5903,
+ "step": 599
+ },
+ {
+ "epoch": 0.6001687974742896,
+ "grad_norm": 0.7267604470252991,
+ "learning_rate": 0.0001808647148333755,
+ "loss": 1.6864,
+ "step": 600
+ },
+ {
+ "epoch": 0.6011690788034134,
+ "grad_norm": 0.672102689743042,
+ "learning_rate": 0.00018080300274917862,
+ "loss": 1.73,
+ "step": 601
+ },
+ {
+ "epoch": 0.6021693601325373,
+ "grad_norm": 0.7541230320930481,
+ "learning_rate": 0.00018074120187709495,
+ "loss": 1.7824,
+ "step": 602
+ },
+ {
+ "epoch": 0.6031696414616611,
+ "grad_norm": 0.6589316129684448,
+ "learning_rate": 0.00018067931228503246,
+ "loss": 1.7007,
+ "step": 603
+ },
+ {
+ "epoch": 0.604169922790785,
+ "grad_norm": 0.7083007097244263,
+ "learning_rate": 0.00018061733404099655,
+ "loss": 1.7703,
+ "step": 604
+ },
+ {
+ "epoch": 0.6051702041199087,
+ "grad_norm": 0.6700689196586609,
+ "learning_rate": 0.00018055526721309016,
+ "loss": 1.5967,
+ "step": 605
+ },
+ {
+ "epoch": 0.6061704854490325,
+ "grad_norm": 0.6766354441642761,
+ "learning_rate": 0.0001804931118695135,
+ "loss": 1.6208,
+ "step": 606
+ },
+ {
+ "epoch": 0.6071707667781564,
+ "grad_norm": 0.8214102387428284,
+ "learning_rate": 0.00018043086807856403,
+ "loss": 1.6084,
+ "step": 607
+ },
+ {
+ "epoch": 0.6081710481072802,
+ "grad_norm": 0.736492395401001,
+ "learning_rate": 0.00018036853590863648,
+ "loss": 1.7254,
+ "step": 608
+ },
+ {
+ "epoch": 0.609171329436404,
+ "grad_norm": 0.6791033148765564,
+ "learning_rate": 0.00018030611542822257,
+ "loss": 1.5837,
+ "step": 609
+ },
+ {
+ "epoch": 0.6101716107655278,
+ "grad_norm": 0.6344060301780701,
+ "learning_rate": 0.00018024360670591114,
+ "loss": 1.5407,
+ "step": 610
+ },
+ {
+ "epoch": 0.6111718920946516,
+ "grad_norm": 0.9026575088500977,
+ "learning_rate": 0.00018018100981038798,
+ "loss": 1.6748,
+ "step": 611
+ },
+ {
+ "epoch": 0.6121721734237754,
+ "grad_norm": 0.8029866814613342,
+ "learning_rate": 0.00018011832481043576,
+ "loss": 1.7547,
+ "step": 612
+ },
+ {
+ "epoch": 0.6131724547528993,
+ "grad_norm": 0.8065117597579956,
+ "learning_rate": 0.00018005555177493394,
+ "loss": 1.8051,
+ "step": 613
+ },
+ {
+ "epoch": 0.614172736082023,
+ "grad_norm": 0.7858480215072632,
+ "learning_rate": 0.00017999269077285875,
+ "loss": 1.6728,
+ "step": 614
+ },
+ {
+ "epoch": 0.6151730174111469,
+ "grad_norm": 0.6735272407531738,
+ "learning_rate": 0.00017992974187328305,
+ "loss": 1.8585,
+ "step": 615
+ },
+ {
+ "epoch": 0.6161732987402707,
+ "grad_norm": 0.7518951892852783,
+ "learning_rate": 0.00017986670514537627,
+ "loss": 1.5429,
+ "step": 616
+ },
+ {
+ "epoch": 0.6171735800693945,
+ "grad_norm": 0.6952928900718689,
+ "learning_rate": 0.00017980358065840444,
+ "loss": 1.5982,
+ "step": 617
+ },
+ {
+ "epoch": 0.6181738613985184,
+ "grad_norm": 0.8996840119361877,
+ "learning_rate": 0.0001797403684817299,
+ "loss": 1.8164,
+ "step": 618
+ },
+ {
+ "epoch": 0.6191741427276422,
+ "grad_norm": 0.7645425200462341,
+ "learning_rate": 0.00017967706868481144,
+ "loss": 1.8373,
+ "step": 619
+ },
+ {
+ "epoch": 0.6201744240567659,
+ "grad_norm": 0.8479064106941223,
+ "learning_rate": 0.00017961368133720407,
+ "loss": 1.6483,
+ "step": 620
+ },
+ {
+ "epoch": 0.6211747053858898,
+ "grad_norm": 0.7806827425956726,
+ "learning_rate": 0.000179550206508559,
+ "loss": 1.78,
+ "step": 621
+ },
+ {
+ "epoch": 0.6221749867150136,
+ "grad_norm": 0.6476775407791138,
+ "learning_rate": 0.00017948664426862364,
+ "loss": 1.7712,
+ "step": 622
+ },
+ {
+ "epoch": 0.6231752680441374,
+ "grad_norm": 0.8421279788017273,
+ "learning_rate": 0.00017942299468724134,
+ "loss": 1.7753,
+ "step": 623
+ },
+ {
+ "epoch": 0.6241755493732613,
+ "grad_norm": 0.6706071496009827,
+ "learning_rate": 0.0001793592578343515,
+ "loss": 1.4093,
+ "step": 624
+ },
+ {
+ "epoch": 0.625175830702385,
+ "grad_norm": 0.8224231004714966,
+ "learning_rate": 0.0001792954337799894,
+ "loss": 1.7343,
+ "step": 625
+ },
+ {
+ "epoch": 0.6261761120315089,
+ "grad_norm": 0.8398690819740295,
+ "learning_rate": 0.00017923152259428612,
+ "loss": 1.8017,
+ "step": 626
+ },
+ {
+ "epoch": 0.6271763933606327,
+ "grad_norm": 0.6664738059043884,
+ "learning_rate": 0.00017916752434746856,
+ "loss": 1.6023,
+ "step": 627
+ },
+ {
+ "epoch": 0.6281766746897565,
+ "grad_norm": 0.9246477484703064,
+ "learning_rate": 0.0001791034391098591,
+ "loss": 1.7862,
+ "step": 628
+ },
+ {
+ "epoch": 0.6291769560188804,
+ "grad_norm": 0.797835111618042,
+ "learning_rate": 0.00017903926695187595,
+ "loss": 1.6059,
+ "step": 629
+ },
+ {
+ "epoch": 0.6301772373480041,
+ "grad_norm": 0.613727331161499,
+ "learning_rate": 0.0001789750079440326,
+ "loss": 1.5086,
+ "step": 630
+ },
+ {
+ "epoch": 0.6311775186771279,
+ "grad_norm": 0.7127765417098999,
+ "learning_rate": 0.00017891066215693817,
+ "loss": 1.5985,
+ "step": 631
+ },
+ {
+ "epoch": 0.6321778000062518,
+ "grad_norm": 0.6923073530197144,
+ "learning_rate": 0.00017884622966129695,
+ "loss": 1.5537,
+ "step": 632
+ },
+ {
+ "epoch": 0.6331780813353756,
+ "grad_norm": 0.7015733122825623,
+ "learning_rate": 0.00017878171052790868,
+ "loss": 1.7782,
+ "step": 633
+ },
+ {
+ "epoch": 0.6341783626644993,
+ "grad_norm": 0.6932784914970398,
+ "learning_rate": 0.00017871710482766817,
+ "loss": 1.4294,
+ "step": 634
+ },
+ {
+ "epoch": 0.6351786439936232,
+ "grad_norm": 0.7054254412651062,
+ "learning_rate": 0.00017865241263156546,
+ "loss": 1.7227,
+ "step": 635
+ },
+ {
+ "epoch": 0.636178925322747,
+ "grad_norm": 0.6994242072105408,
+ "learning_rate": 0.0001785876340106855,
+ "loss": 1.5998,
+ "step": 636
+ },
+ {
+ "epoch": 0.6371792066518709,
+ "grad_norm": 0.81461501121521,
+ "learning_rate": 0.0001785227690362083,
+ "loss": 1.7212,
+ "step": 637
+ },
+ {
+ "epoch": 0.6381794879809947,
+ "grad_norm": 0.943434476852417,
+ "learning_rate": 0.00017845781777940878,
+ "loss": 1.5926,
+ "step": 638
+ },
+ {
+ "epoch": 0.6391797693101184,
+ "grad_norm": 0.8455945253372192,
+ "learning_rate": 0.00017839278031165658,
+ "loss": 1.8511,
+ "step": 639
+ },
+ {
+ "epoch": 0.6401800506392423,
+ "grad_norm": 0.9348243474960327,
+ "learning_rate": 0.00017832765670441612,
+ "loss": 1.6293,
+ "step": 640
+ },
+ {
+ "epoch": 0.6411803319683661,
+ "grad_norm": 0.746127724647522,
+ "learning_rate": 0.0001782624470292465,
+ "loss": 1.4903,
+ "step": 641
+ },
+ {
+ "epoch": 0.6421806132974899,
+ "grad_norm": 0.6215783357620239,
+ "learning_rate": 0.0001781971513578013,
+ "loss": 1.7806,
+ "step": 642
+ },
+ {
+ "epoch": 0.6431808946266138,
+ "grad_norm": 0.7447994947433472,
+ "learning_rate": 0.00017813176976182873,
+ "loss": 1.7475,
+ "step": 643
+ },
+ {
+ "epoch": 0.6441811759557375,
+ "grad_norm": 0.6916540265083313,
+ "learning_rate": 0.00017806630231317127,
+ "loss": 1.6401,
+ "step": 644
+ },
+ {
+ "epoch": 0.6451814572848613,
+ "grad_norm": 0.7208524942398071,
+ "learning_rate": 0.00017800074908376584,
+ "loss": 1.7524,
+ "step": 645
+ },
+ {
+ "epoch": 0.6461817386139852,
+ "grad_norm": 0.7548331618309021,
+ "learning_rate": 0.00017793511014564358,
+ "loss": 1.5644,
+ "step": 646
+ },
+ {
+ "epoch": 0.647182019943109,
+ "grad_norm": 0.7919667959213257,
+ "learning_rate": 0.00017786938557092983,
+ "loss": 1.6758,
+ "step": 647
+ },
+ {
+ "epoch": 0.6481823012722329,
+ "grad_norm": 0.700618326663971,
+ "learning_rate": 0.00017780357543184397,
+ "loss": 1.5213,
+ "step": 648
+ },
+ {
+ "epoch": 0.6491825826013566,
+ "grad_norm": 0.6646535992622375,
+ "learning_rate": 0.00017773767980069945,
+ "loss": 1.6487,
+ "step": 649
+ },
+ {
+ "epoch": 0.6501828639304804,
+ "grad_norm": 0.6486669182777405,
+ "learning_rate": 0.0001776716987499037,
+ "loss": 1.6556,
+ "step": 650
+ },
+ {
+ "epoch": 0.6511831452596043,
+ "grad_norm": 0.657747745513916,
+ "learning_rate": 0.0001776056323519579,
+ "loss": 1.5943,
+ "step": 651
+ },
+ {
+ "epoch": 0.6521834265887281,
+ "grad_norm": 0.7777379751205444,
+ "learning_rate": 0.00017753948067945712,
+ "loss": 1.6069,
+ "step": 652
+ },
+ {
+ "epoch": 0.6531837079178519,
+ "grad_norm": 0.772153913974762,
+ "learning_rate": 0.00017747324380509006,
+ "loss": 1.7065,
+ "step": 653
+ },
+ {
+ "epoch": 0.6541839892469757,
+ "grad_norm": 0.6984367966651917,
+ "learning_rate": 0.00017740692180163908,
+ "loss": 1.7122,
+ "step": 654
+ },
+ {
+ "epoch": 0.6551842705760995,
+ "grad_norm": 0.8033855557441711,
+ "learning_rate": 0.00017734051474198003,
+ "loss": 1.6095,
+ "step": 655
+ },
+ {
+ "epoch": 0.6561845519052233,
+ "grad_norm": 0.7568691372871399,
+ "learning_rate": 0.0001772740226990823,
+ "loss": 1.6783,
+ "step": 656
+ },
+ {
+ "epoch": 0.6571848332343472,
+ "grad_norm": 0.7288162708282471,
+ "learning_rate": 0.00017720744574600863,
+ "loss": 1.695,
+ "step": 657
+ },
+ {
+ "epoch": 0.658185114563471,
+ "grad_norm": 0.6898120045661926,
+ "learning_rate": 0.00017714078395591502,
+ "loss": 1.6539,
+ "step": 658
+ },
+ {
+ "epoch": 0.6591853958925948,
+ "grad_norm": 0.6977367997169495,
+ "learning_rate": 0.00017707403740205071,
+ "loss": 1.4558,
+ "step": 659
+ },
+ {
+ "epoch": 0.6601856772217186,
+ "grad_norm": 0.6594682335853577,
+ "learning_rate": 0.00017700720615775812,
+ "loss": 1.56,
+ "step": 660
+ },
+ {
+ "epoch": 0.6611859585508424,
+ "grad_norm": 0.6146736741065979,
+ "learning_rate": 0.0001769402902964727,
+ "loss": 1.7014,
+ "step": 661
+ },
+ {
+ "epoch": 0.6621862398799663,
+ "grad_norm": 0.7182234525680542,
+ "learning_rate": 0.00017687328989172288,
+ "loss": 1.5655,
+ "step": 662
+ },
+ {
+ "epoch": 0.66318652120909,
+ "grad_norm": 0.6940692067146301,
+ "learning_rate": 0.00017680620501712996,
+ "loss": 1.6177,
+ "step": 663
+ },
+ {
+ "epoch": 0.6641868025382138,
+ "grad_norm": 0.7672961950302124,
+ "learning_rate": 0.00017673903574640814,
+ "loss": 1.559,
+ "step": 664
+ },
+ {
+ "epoch": 0.6651870838673377,
+ "grad_norm": 0.654500424861908,
+ "learning_rate": 0.00017667178215336423,
+ "loss": 1.5024,
+ "step": 665
+ },
+ {
+ "epoch": 0.6661873651964615,
+ "grad_norm": 0.8137261867523193,
+ "learning_rate": 0.0001766044443118978,
+ "loss": 1.7865,
+ "step": 666
+ },
+ {
+ "epoch": 0.6671876465255854,
+ "grad_norm": 0.806624710559845,
+ "learning_rate": 0.000176537022296001,
+ "loss": 1.4944,
+ "step": 667
+ },
+ {
+ "epoch": 0.6681879278547092,
+ "grad_norm": 0.7952747941017151,
+ "learning_rate": 0.00017646951617975837,
+ "loss": 1.5371,
+ "step": 668
+ },
+ {
+ "epoch": 0.6691882091838329,
+ "grad_norm": 0.6380738615989685,
+ "learning_rate": 0.00017640192603734692,
+ "loss": 1.3117,
+ "step": 669
+ },
+ {
+ "epoch": 0.6701884905129568,
+ "grad_norm": 0.6559002995491028,
+ "learning_rate": 0.00017633425194303606,
+ "loss": 1.3662,
+ "step": 670
+ },
+ {
+ "epoch": 0.6711887718420806,
+ "grad_norm": 0.715826153755188,
+ "learning_rate": 0.00017626649397118734,
+ "loss": 1.7271,
+ "step": 671
+ },
+ {
+ "epoch": 0.6721890531712044,
+ "grad_norm": 0.6719872355461121,
+ "learning_rate": 0.00017619865219625452,
+ "loss": 1.747,
+ "step": 672
+ },
+ {
+ "epoch": 0.6731893345003283,
+ "grad_norm": 0.6901715397834778,
+ "learning_rate": 0.00017613072669278343,
+ "loss": 1.6438,
+ "step": 673
+ },
+ {
+ "epoch": 0.674189615829452,
+ "grad_norm": 0.6601479649543762,
+ "learning_rate": 0.00017606271753541192,
+ "loss": 1.8191,
+ "step": 674
+ },
+ {
+ "epoch": 0.6751898971585758,
+ "grad_norm": 0.8059187531471252,
+ "learning_rate": 0.00017599462479886974,
+ "loss": 1.6946,
+ "step": 675
+ },
+ {
+ "epoch": 0.6761901784876997,
+ "grad_norm": 0.6966856718063354,
+ "learning_rate": 0.00017592644855797854,
+ "loss": 1.5551,
+ "step": 676
+ },
+ {
+ "epoch": 0.6771904598168235,
+ "grad_norm": 0.7306144833564758,
+ "learning_rate": 0.00017585818888765168,
+ "loss": 1.5429,
+ "step": 677
+ },
+ {
+ "epoch": 0.6781907411459474,
+ "grad_norm": 0.572907030582428,
+ "learning_rate": 0.0001757898458628941,
+ "loss": 1.4437,
+ "step": 678
+ },
+ {
+ "epoch": 0.6791910224750711,
+ "grad_norm": 0.6807466149330139,
+ "learning_rate": 0.00017572141955880252,
+ "loss": 1.6307,
+ "step": 679
+ },
+ {
+ "epoch": 0.6801913038041949,
+ "grad_norm": 0.7529204487800598,
+ "learning_rate": 0.00017565291005056504,
+ "loss": 1.631,
+ "step": 680
+ },
+ {
+ "epoch": 0.6811915851333188,
+ "grad_norm": 0.6292940378189087,
+ "learning_rate": 0.00017558431741346122,
+ "loss": 1.7512,
+ "step": 681
+ },
+ {
+ "epoch": 0.6821918664624426,
+ "grad_norm": 0.7981480956077576,
+ "learning_rate": 0.00017551564172286197,
+ "loss": 1.7704,
+ "step": 682
+ },
+ {
+ "epoch": 0.6831921477915663,
+ "grad_norm": 0.7816259860992432,
+ "learning_rate": 0.00017544688305422943,
+ "loss": 1.4954,
+ "step": 683
+ },
+ {
+ "epoch": 0.6841924291206902,
+ "grad_norm": 0.6866456866264343,
+ "learning_rate": 0.00017537804148311695,
+ "loss": 1.7986,
+ "step": 684
+ },
+ {
+ "epoch": 0.685192710449814,
+ "grad_norm": 0.7499064803123474,
+ "learning_rate": 0.00017530911708516902,
+ "loss": 1.6472,
+ "step": 685
+ },
+ {
+ "epoch": 0.6861929917789378,
+ "grad_norm": 0.5923457145690918,
+ "learning_rate": 0.00017524010993612098,
+ "loss": 1.4866,
+ "step": 686
+ },
+ {
+ "epoch": 0.6871932731080617,
+ "grad_norm": 0.6991822719573975,
+ "learning_rate": 0.00017517102011179933,
+ "loss": 1.605,
+ "step": 687
+ },
+ {
+ "epoch": 0.6881935544371854,
+ "grad_norm": 0.7880247235298157,
+ "learning_rate": 0.0001751018476881212,
+ "loss": 1.641,
+ "step": 688
+ },
+ {
+ "epoch": 0.6891938357663093,
+ "grad_norm": 0.7848097085952759,
+ "learning_rate": 0.00017503259274109464,
+ "loss": 1.7505,
+ "step": 689
+ },
+ {
+ "epoch": 0.6901941170954331,
+ "grad_norm": 0.693678081035614,
+ "learning_rate": 0.00017496325534681825,
+ "loss": 1.6565,
+ "step": 690
+ },
+ {
+ "epoch": 0.6911943984245569,
+ "grad_norm": 0.8232877254486084,
+ "learning_rate": 0.00017489383558148136,
+ "loss": 1.7664,
+ "step": 691
+ },
+ {
+ "epoch": 0.6921946797536808,
+ "grad_norm": 0.7834855914115906,
+ "learning_rate": 0.00017482433352136365,
+ "loss": 1.4381,
+ "step": 692
+ },
+ {
+ "epoch": 0.6931949610828045,
+ "grad_norm": 0.6186713576316833,
+ "learning_rate": 0.00017475474924283536,
+ "loss": 1.6482,
+ "step": 693
+ },
+ {
+ "epoch": 0.6941952424119283,
+ "grad_norm": 0.7511133551597595,
+ "learning_rate": 0.00017468508282235704,
+ "loss": 1.6186,
+ "step": 694
+ },
+ {
+ "epoch": 0.6951955237410522,
+ "grad_norm": 0.8017745614051819,
+ "learning_rate": 0.00017461533433647946,
+ "loss": 1.6597,
+ "step": 695
+ },
+ {
+ "epoch": 0.696195805070176,
+ "grad_norm": 0.8190794587135315,
+ "learning_rate": 0.00017454550386184362,
+ "loss": 1.6602,
+ "step": 696
+ },
+ {
+ "epoch": 0.6971960863992998,
+ "grad_norm": 0.7479042410850525,
+ "learning_rate": 0.00017447559147518055,
+ "loss": 1.77,
+ "step": 697
+ },
+ {
+ "epoch": 0.6981963677284236,
+ "grad_norm": 0.7239962816238403,
+ "learning_rate": 0.00017440559725331135,
+ "loss": 1.5838,
+ "step": 698
+ },
+ {
+ "epoch": 0.6991966490575474,
+ "grad_norm": 0.7252762317657471,
+ "learning_rate": 0.000174335521273147,
+ "loss": 1.5462,
+ "step": 699
+ },
+ {
+ "epoch": 0.7001969303866713,
+ "grad_norm": 0.9383960962295532,
+ "learning_rate": 0.00017426536361168834,
+ "loss": 1.5104,
+ "step": 700
+ },
+ {
+ "epoch": 0.7011972117157951,
+ "grad_norm": 0.6944159269332886,
+ "learning_rate": 0.00017419512434602594,
+ "loss": 1.6382,
+ "step": 701
+ },
+ {
+ "epoch": 0.7021974930449189,
+ "grad_norm": 0.6809273362159729,
+ "learning_rate": 0.00017412480355334005,
+ "loss": 1.725,
+ "step": 702
+ },
+ {
+ "epoch": 0.7031977743740427,
+ "grad_norm": 0.7521125674247742,
+ "learning_rate": 0.00017405440131090048,
+ "loss": 1.8499,
+ "step": 703
+ },
+ {
+ "epoch": 0.7041980557031665,
+ "grad_norm": 0.6854100227355957,
+ "learning_rate": 0.00017398391769606658,
+ "loss": 1.6648,
+ "step": 704
+ },
+ {
+ "epoch": 0.7051983370322903,
+ "grad_norm": 0.7382327318191528,
+ "learning_rate": 0.00017391335278628712,
+ "loss": 1.5806,
+ "step": 705
+ },
+ {
+ "epoch": 0.7061986183614142,
+ "grad_norm": 0.7387582063674927,
+ "learning_rate": 0.00017384270665910014,
+ "loss": 1.5563,
+ "step": 706
+ },
+ {
+ "epoch": 0.707198899690538,
+ "grad_norm": 0.7698972821235657,
+ "learning_rate": 0.000173771979392133,
+ "loss": 1.6626,
+ "step": 707
+ },
+ {
+ "epoch": 0.7081991810196617,
+ "grad_norm": 0.7639899849891663,
+ "learning_rate": 0.00017370117106310214,
+ "loss": 1.6725,
+ "step": 708
+ },
+ {
+ "epoch": 0.7091994623487856,
+ "grad_norm": 0.6684393286705017,
+ "learning_rate": 0.0001736302817498131,
+ "loss": 1.64,
+ "step": 709
+ },
+ {
+ "epoch": 0.7101997436779094,
+ "grad_norm": 0.6329504251480103,
+ "learning_rate": 0.00017355931153016044,
+ "loss": 1.4472,
+ "step": 710
+ },
+ {
+ "epoch": 0.7112000250070333,
+ "grad_norm": 0.8133587837219238,
+ "learning_rate": 0.0001734882604821276,
+ "loss": 1.7971,
+ "step": 711
+ },
+ {
+ "epoch": 0.712200306336157,
+ "grad_norm": 0.6524143218994141,
+ "learning_rate": 0.0001734171286837868,
+ "loss": 1.5366,
+ "step": 712
+ },
+ {
+ "epoch": 0.7132005876652808,
+ "grad_norm": 0.6714311242103577,
+ "learning_rate": 0.00017334591621329906,
+ "loss": 1.841,
+ "step": 713
+ },
+ {
+ "epoch": 0.7142008689944047,
+ "grad_norm": 0.6690782904624939,
+ "learning_rate": 0.00017327462314891402,
+ "loss": 1.623,
+ "step": 714
+ },
+ {
+ "epoch": 0.7152011503235285,
+ "grad_norm": 0.650442361831665,
+ "learning_rate": 0.00017320324956896977,
+ "loss": 1.6124,
+ "step": 715
+ },
+ {
+ "epoch": 0.7162014316526523,
+ "grad_norm": 0.7075713276863098,
+ "learning_rate": 0.00017313179555189306,
+ "loss": 1.5154,
+ "step": 716
+ },
+ {
+ "epoch": 0.7172017129817762,
+ "grad_norm": 0.729060173034668,
+ "learning_rate": 0.00017306026117619889,
+ "loss": 1.7072,
+ "step": 717
+ },
+ {
+ "epoch": 0.7182019943108999,
+ "grad_norm": 0.8547433614730835,
+ "learning_rate": 0.0001729886465204906,
+ "loss": 1.6237,
+ "step": 718
+ },
+ {
+ "epoch": 0.7192022756400237,
+ "grad_norm": 0.6729336380958557,
+ "learning_rate": 0.0001729169516634598,
+ "loss": 1.7769,
+ "step": 719
+ },
+ {
+ "epoch": 0.7202025569691476,
+ "grad_norm": 0.7437167167663574,
+ "learning_rate": 0.0001728451766838861,
+ "loss": 1.5056,
+ "step": 720
+ },
+ {
+ "epoch": 0.7212028382982714,
+ "grad_norm": 0.6573147177696228,
+ "learning_rate": 0.00017277332166063726,
+ "loss": 1.7694,
+ "step": 721
+ },
+ {
+ "epoch": 0.7222031196273953,
+ "grad_norm": 0.6767126321792603,
+ "learning_rate": 0.00017270138667266894,
+ "loss": 1.6014,
+ "step": 722
+ },
+ {
+ "epoch": 0.723203400956519,
+ "grad_norm": 0.7488179206848145,
+ "learning_rate": 0.00017262937179902472,
+ "loss": 1.573,
+ "step": 723
+ },
+ {
+ "epoch": 0.7242036822856428,
+ "grad_norm": 0.6491002440452576,
+ "learning_rate": 0.00017255727711883588,
+ "loss": 1.6705,
+ "step": 724
+ },
+ {
+ "epoch": 0.7252039636147667,
+ "grad_norm": 0.764090359210968,
+ "learning_rate": 0.00017248510271132144,
+ "loss": 1.6761,
+ "step": 725
+ },
+ {
+ "epoch": 0.7262042449438905,
+ "grad_norm": 0.7116997838020325,
+ "learning_rate": 0.00017241284865578802,
+ "loss": 1.7435,
+ "step": 726
+ },
+ {
+ "epoch": 0.7272045262730142,
+ "grad_norm": 0.6367645859718323,
+ "learning_rate": 0.00017234051503162978,
+ "loss": 1.7061,
+ "step": 727
+ },
+ {
+ "epoch": 0.7282048076021381,
+ "grad_norm": 0.7232155203819275,
+ "learning_rate": 0.0001722681019183283,
+ "loss": 1.8142,
+ "step": 728
+ },
+ {
+ "epoch": 0.7292050889312619,
+ "grad_norm": 0.7533649802207947,
+ "learning_rate": 0.00017219560939545246,
+ "loss": 1.8202,
+ "step": 729
+ },
+ {
+ "epoch": 0.7302053702603858,
+ "grad_norm": 0.6923018097877502,
+ "learning_rate": 0.00017212303754265843,
+ "loss": 1.4925,
+ "step": 730
+ },
+ {
+ "epoch": 0.7312056515895096,
+ "grad_norm": 0.7326932549476624,
+ "learning_rate": 0.0001720503864396896,
+ "loss": 1.5192,
+ "step": 731
+ },
+ {
+ "epoch": 0.7322059329186333,
+ "grad_norm": 0.7220762968063354,
+ "learning_rate": 0.00017197765616637636,
+ "loss": 1.7601,
+ "step": 732
+ },
+ {
+ "epoch": 0.7332062142477572,
+ "grad_norm": 0.605725884437561,
+ "learning_rate": 0.0001719048468026361,
+ "loss": 1.6309,
+ "step": 733
+ },
+ {
+ "epoch": 0.734206495576881,
+ "grad_norm": 0.6728388667106628,
+ "learning_rate": 0.00017183195842847322,
+ "loss": 1.5993,
+ "step": 734
+ },
+ {
+ "epoch": 0.7352067769060048,
+ "grad_norm": 0.7035244703292847,
+ "learning_rate": 0.0001717589911239788,
+ "loss": 1.6031,
+ "step": 735
+ },
+ {
+ "epoch": 0.7362070582351287,
+ "grad_norm": 0.7473010420799255,
+ "learning_rate": 0.00017168594496933074,
+ "loss": 1.5833,
+ "step": 736
+ },
+ {
+ "epoch": 0.7372073395642524,
+ "grad_norm": 0.6310701370239258,
+ "learning_rate": 0.00017161282004479351,
+ "loss": 1.4328,
+ "step": 737
+ },
+ {
+ "epoch": 0.7382076208933762,
+ "grad_norm": 0.6805673837661743,
+ "learning_rate": 0.0001715396164307182,
+ "loss": 1.5429,
+ "step": 738
+ },
+ {
+ "epoch": 0.7392079022225001,
+ "grad_norm": 0.747222900390625,
+ "learning_rate": 0.0001714663342075424,
+ "loss": 1.7696,
+ "step": 739
+ },
+ {
+ "epoch": 0.7402081835516239,
+ "grad_norm": 0.8214403390884399,
+ "learning_rate": 0.00017139297345578994,
+ "loss": 1.5997,
+ "step": 740
+ },
+ {
+ "epoch": 0.7412084648807478,
+ "grad_norm": 0.6722521781921387,
+ "learning_rate": 0.00017131953425607104,
+ "loss": 1.5287,
+ "step": 741
+ },
+ {
+ "epoch": 0.7422087462098715,
+ "grad_norm": 0.6937971115112305,
+ "learning_rate": 0.00017124601668908212,
+ "loss": 1.7263,
+ "step": 742
+ },
+ {
+ "epoch": 0.7432090275389953,
+ "grad_norm": 0.7590844631195068,
+ "learning_rate": 0.00017117242083560568,
+ "loss": 1.7263,
+ "step": 743
+ },
+ {
+ "epoch": 0.7442093088681192,
+ "grad_norm": 0.7913306355476379,
+ "learning_rate": 0.00017109874677651024,
+ "loss": 1.7646,
+ "step": 744
+ },
+ {
+ "epoch": 0.745209590197243,
+ "grad_norm": 0.7123669385910034,
+ "learning_rate": 0.0001710249945927503,
+ "loss": 1.6768,
+ "step": 745
+ },
+ {
+ "epoch": 0.7462098715263668,
+ "grad_norm": 0.8426288366317749,
+ "learning_rate": 0.00017095116436536612,
+ "loss": 1.8496,
+ "step": 746
+ },
+ {
+ "epoch": 0.7472101528554906,
+ "grad_norm": 0.6152015328407288,
+ "learning_rate": 0.00017087725617548385,
+ "loss": 1.4527,
+ "step": 747
+ },
+ {
+ "epoch": 0.7482104341846144,
+ "grad_norm": 0.8348223567008972,
+ "learning_rate": 0.00017080327010431513,
+ "loss": 1.4847,
+ "step": 748
+ },
+ {
+ "epoch": 0.7492107155137382,
+ "grad_norm": 0.7883800268173218,
+ "learning_rate": 0.00017072920623315734,
+ "loss": 1.5941,
+ "step": 749
+ },
+ {
+ "epoch": 0.7502109968428621,
+ "grad_norm": 0.6957768201828003,
+ "learning_rate": 0.00017065506464339326,
+ "loss": 1.7543,
+ "step": 750
+ },
+ {
+ "epoch": 0.7512112781719859,
+ "grad_norm": 0.5898700952529907,
+ "learning_rate": 0.00017058084541649106,
+ "loss": 1.7859,
+ "step": 751
+ },
+ {
+ "epoch": 0.7522115595011097,
+ "grad_norm": 0.6882239580154419,
+ "learning_rate": 0.00017050654863400429,
+ "loss": 1.3233,
+ "step": 752
+ },
+ {
+ "epoch": 0.7532118408302335,
+ "grad_norm": 0.7327316999435425,
+ "learning_rate": 0.00017043217437757164,
+ "loss": 1.5067,
+ "step": 753
+ },
+ {
+ "epoch": 0.7542121221593573,
+ "grad_norm": 0.9257964491844177,
+ "learning_rate": 0.00017035772272891702,
+ "loss": 1.503,
+ "step": 754
+ },
+ {
+ "epoch": 0.7552124034884812,
+ "grad_norm": 0.7924116253852844,
+ "learning_rate": 0.00017028319376984928,
+ "loss": 1.8975,
+ "step": 755
+ },
+ {
+ "epoch": 0.756212684817605,
+ "grad_norm": 0.6651099920272827,
+ "learning_rate": 0.00017020858758226229,
+ "loss": 1.649,
+ "step": 756
+ },
+ {
+ "epoch": 0.7572129661467287,
+ "grad_norm": 0.7257362604141235,
+ "learning_rate": 0.0001701339042481347,
+ "loss": 1.6919,
+ "step": 757
+ },
+ {
+ "epoch": 0.7582132474758526,
+ "grad_norm": 0.8733739852905273,
+ "learning_rate": 0.00017005914384953007,
+ "loss": 1.5929,
+ "step": 758
+ },
+ {
+ "epoch": 0.7592135288049764,
+ "grad_norm": 0.6347383856773376,
+ "learning_rate": 0.00016998430646859654,
+ "loss": 1.3341,
+ "step": 759
+ },
+ {
+ "epoch": 0.7602138101341002,
+ "grad_norm": 0.6915012001991272,
+ "learning_rate": 0.00016990939218756683,
+ "loss": 1.4971,
+ "step": 760
+ },
+ {
+ "epoch": 0.761214091463224,
+ "grad_norm": 0.7862069606781006,
+ "learning_rate": 0.0001698344010887582,
+ "loss": 1.7468,
+ "step": 761
+ },
+ {
+ "epoch": 0.7622143727923478,
+ "grad_norm": 0.7318029403686523,
+ "learning_rate": 0.0001697593332545723,
+ "loss": 1.8143,
+ "step": 762
+ },
+ {
+ "epoch": 0.7632146541214717,
+ "grad_norm": 0.6758155226707458,
+ "learning_rate": 0.0001696841887674951,
+ "loss": 1.6652,
+ "step": 763
+ },
+ {
+ "epoch": 0.7642149354505955,
+ "grad_norm": 0.6853237748146057,
+ "learning_rate": 0.00016960896771009684,
+ "loss": 1.5176,
+ "step": 764
+ },
+ {
+ "epoch": 0.7652152167797193,
+ "grad_norm": 0.9686934351921082,
+ "learning_rate": 0.00016953367016503182,
+ "loss": 1.5366,
+ "step": 765
+ },
+ {
+ "epoch": 0.7662154981088432,
+ "grad_norm": 0.7232028841972351,
+ "learning_rate": 0.00016945829621503838,
+ "loss": 1.6932,
+ "step": 766
+ },
+ {
+ "epoch": 0.7672157794379669,
+ "grad_norm": 0.6606596112251282,
+ "learning_rate": 0.00016938284594293897,
+ "loss": 1.7051,
+ "step": 767
+ },
+ {
+ "epoch": 0.7682160607670907,
+ "grad_norm": 0.6337714195251465,
+ "learning_rate": 0.00016930731943163972,
+ "loss": 1.6505,
+ "step": 768
+ },
+ {
+ "epoch": 0.7692163420962146,
+ "grad_norm": 0.6292264461517334,
+ "learning_rate": 0.00016923171676413063,
+ "loss": 1.7207,
+ "step": 769
+ },
+ {
+ "epoch": 0.7702166234253384,
+ "grad_norm": 0.7183407545089722,
+ "learning_rate": 0.00016915603802348535,
+ "loss": 1.7025,
+ "step": 770
+ },
+ {
+ "epoch": 0.7712169047544621,
+ "grad_norm": 0.805107593536377,
+ "learning_rate": 0.00016908028329286112,
+ "loss": 1.592,
+ "step": 771
+ },
+ {
+ "epoch": 0.772217186083586,
+ "grad_norm": 0.725777804851532,
+ "learning_rate": 0.0001690044526554987,
+ "loss": 1.6714,
+ "step": 772
+ },
+ {
+ "epoch": 0.7732174674127098,
+ "grad_norm": 0.6801775097846985,
+ "learning_rate": 0.00016892854619472223,
+ "loss": 1.5047,
+ "step": 773
+ },
+ {
+ "epoch": 0.7742177487418337,
+ "grad_norm": 0.7701449990272522,
+ "learning_rate": 0.00016885256399393924,
+ "loss": 1.5506,
+ "step": 774
+ },
+ {
+ "epoch": 0.7752180300709575,
+ "grad_norm": 0.6954746842384338,
+ "learning_rate": 0.00016877650613664034,
+ "loss": 1.4859,
+ "step": 775
+ },
+ {
+ "epoch": 0.7762183114000812,
+ "grad_norm": 0.7431885004043579,
+ "learning_rate": 0.00016870037270639942,
+ "loss": 1.6087,
+ "step": 776
+ },
+ {
+ "epoch": 0.7772185927292051,
+ "grad_norm": 0.687329113483429,
+ "learning_rate": 0.0001686241637868734,
+ "loss": 1.7038,
+ "step": 777
+ },
+ {
+ "epoch": 0.7782188740583289,
+ "grad_norm": 0.6656787395477295,
+ "learning_rate": 0.00016854787946180198,
+ "loss": 1.5691,
+ "step": 778
+ },
+ {
+ "epoch": 0.7792191553874527,
+ "grad_norm": 0.7476064562797546,
+ "learning_rate": 0.00016847151981500789,
+ "loss": 1.4972,
+ "step": 779
+ },
+ {
+ "epoch": 0.7802194367165766,
+ "grad_norm": 0.7320332527160645,
+ "learning_rate": 0.00016839508493039657,
+ "loss": 1.7326,
+ "step": 780
+ },
+ {
+ "epoch": 0.7812197180457003,
+ "grad_norm": 0.6432293057441711,
+ "learning_rate": 0.00016831857489195618,
+ "loss": 1.542,
+ "step": 781
+ },
+ {
+ "epoch": 0.7822199993748241,
+ "grad_norm": 0.6751729846000671,
+ "learning_rate": 0.00016824198978375736,
+ "loss": 1.6864,
+ "step": 782
+ },
+ {
+ "epoch": 0.783220280703948,
+ "grad_norm": 0.770193338394165,
+ "learning_rate": 0.00016816532968995328,
+ "loss": 1.5318,
+ "step": 783
+ },
+ {
+ "epoch": 0.7842205620330718,
+ "grad_norm": 0.6820619106292725,
+ "learning_rate": 0.0001680885946947796,
+ "loss": 1.6004,
+ "step": 784
+ },
+ {
+ "epoch": 0.7852208433621957,
+ "grad_norm": 0.9120951294898987,
+ "learning_rate": 0.00016801178488255413,
+ "loss": 1.6506,
+ "step": 785
+ },
+ {
+ "epoch": 0.7862211246913194,
+ "grad_norm": 0.7819542288780212,
+ "learning_rate": 0.00016793490033767698,
+ "loss": 1.5292,
+ "step": 786
+ },
+ {
+ "epoch": 0.7872214060204432,
+ "grad_norm": 0.6647278666496277,
+ "learning_rate": 0.00016785794114463037,
+ "loss": 1.5941,
+ "step": 787
+ },
+ {
+ "epoch": 0.7882216873495671,
+ "grad_norm": 0.6874713897705078,
+ "learning_rate": 0.00016778090738797853,
+ "loss": 1.5543,
+ "step": 788
+ },
+ {
+ "epoch": 0.7892219686786909,
+ "grad_norm": 0.7759424448013306,
+ "learning_rate": 0.00016770379915236766,
+ "loss": 1.6788,
+ "step": 789
+ },
+ {
+ "epoch": 0.7902222500078147,
+ "grad_norm": 0.724583625793457,
+ "learning_rate": 0.00016762661652252567,
+ "loss": 1.5998,
+ "step": 790
+ },
+ {
+ "epoch": 0.7912225313369385,
+ "grad_norm": 0.7921720743179321,
+ "learning_rate": 0.00016754935958326244,
+ "loss": 1.5956,
+ "step": 791
+ },
+ {
+ "epoch": 0.7922228126660623,
+ "grad_norm": 0.6484968662261963,
+ "learning_rate": 0.00016747202841946928,
+ "loss": 1.5708,
+ "step": 792
+ },
+ {
+ "epoch": 0.7932230939951862,
+ "grad_norm": 0.6372153759002686,
+ "learning_rate": 0.00016739462311611919,
+ "loss": 1.5213,
+ "step": 793
+ },
+ {
+ "epoch": 0.79422337532431,
+ "grad_norm": 0.7025095224380493,
+ "learning_rate": 0.00016731714375826657,
+ "loss": 1.4701,
+ "step": 794
+ },
+ {
+ "epoch": 0.7952236566534338,
+ "grad_norm": 0.681094765663147,
+ "learning_rate": 0.00016723959043104728,
+ "loss": 1.5101,
+ "step": 795
+ },
+ {
+ "epoch": 0.7962239379825576,
+ "grad_norm": 0.7129995822906494,
+ "learning_rate": 0.00016716196321967832,
+ "loss": 1.6038,
+ "step": 796
+ },
+ {
+ "epoch": 0.7972242193116814,
+ "grad_norm": 0.7403759360313416,
+ "learning_rate": 0.00016708426220945802,
+ "loss": 1.5906,
+ "step": 797
+ },
+ {
+ "epoch": 0.7982245006408052,
+ "grad_norm": 0.6562372446060181,
+ "learning_rate": 0.00016700648748576574,
+ "loss": 1.6469,
+ "step": 798
+ },
+ {
+ "epoch": 0.7992247819699291,
+ "grad_norm": 0.839885413646698,
+ "learning_rate": 0.0001669286391340618,
+ "loss": 1.5385,
+ "step": 799
+ },
+ {
+ "epoch": 0.8002250632990529,
+ "grad_norm": 0.8687535524368286,
+ "learning_rate": 0.00016685071723988748,
+ "loss": 1.6759,
+ "step": 800
+ },
+ {
+ "epoch": 0.8012253446281766,
+ "grad_norm": 0.6825409531593323,
+ "learning_rate": 0.00016677272188886483,
+ "loss": 1.841,
+ "step": 801
+ },
+ {
+ "epoch": 0.8022256259573005,
+ "grad_norm": 0.6831037402153015,
+ "learning_rate": 0.00016669465316669667,
+ "loss": 1.5476,
+ "step": 802
+ },
+ {
+ "epoch": 0.8032259072864243,
+ "grad_norm": 0.6906002759933472,
+ "learning_rate": 0.00016661651115916642,
+ "loss": 1.6866,
+ "step": 803
+ },
+ {
+ "epoch": 0.8042261886155482,
+ "grad_norm": 0.7675560116767883,
+ "learning_rate": 0.00016653829595213794,
+ "loss": 1.5663,
+ "step": 804
+ },
+ {
+ "epoch": 0.805226469944672,
+ "grad_norm": 0.6594063639640808,
+ "learning_rate": 0.00016646000763155568,
+ "loss": 1.5247,
+ "step": 805
+ },
+ {
+ "epoch": 0.8062267512737957,
+ "grad_norm": 0.7470384836196899,
+ "learning_rate": 0.00016638164628344425,
+ "loss": 1.6468,
+ "step": 806
+ },
+ {
+ "epoch": 0.8072270326029196,
+ "grad_norm": 0.6874479651451111,
+ "learning_rate": 0.00016630321199390867,
+ "loss": 1.5948,
+ "step": 807
+ },
+ {
+ "epoch": 0.8082273139320434,
+ "grad_norm": 0.7301204204559326,
+ "learning_rate": 0.00016622470484913406,
+ "loss": 1.3922,
+ "step": 808
+ },
+ {
+ "epoch": 0.8092275952611672,
+ "grad_norm": 0.6781039834022522,
+ "learning_rate": 0.00016614612493538551,
+ "loss": 1.6054,
+ "step": 809
+ },
+ {
+ "epoch": 0.810227876590291,
+ "grad_norm": 0.6913226246833801,
+ "learning_rate": 0.00016606747233900815,
+ "loss": 1.5754,
+ "step": 810
+ },
+ {
+ "epoch": 0.8112281579194148,
+ "grad_norm": 0.667425811290741,
+ "learning_rate": 0.00016598874714642697,
+ "loss": 1.8492,
+ "step": 811
+ },
+ {
+ "epoch": 0.8122284392485386,
+ "grad_norm": 0.7662241458892822,
+ "learning_rate": 0.00016590994944414678,
+ "loss": 1.8034,
+ "step": 812
+ },
+ {
+ "epoch": 0.8132287205776625,
+ "grad_norm": 0.7574827075004578,
+ "learning_rate": 0.00016583107931875192,
+ "loss": 1.7435,
+ "step": 813
+ },
+ {
+ "epoch": 0.8142290019067863,
+ "grad_norm": 0.9005519151687622,
+ "learning_rate": 0.0001657521368569064,
+ "loss": 1.6769,
+ "step": 814
+ },
+ {
+ "epoch": 0.8152292832359102,
+ "grad_norm": 0.6895585656166077,
+ "learning_rate": 0.0001656731221453537,
+ "loss": 1.7562,
+ "step": 815
+ },
+ {
+ "epoch": 0.8162295645650339,
+ "grad_norm": 0.7573346495628357,
+ "learning_rate": 0.00016559403527091675,
+ "loss": 1.4748,
+ "step": 816
+ },
+ {
+ "epoch": 0.8172298458941577,
+ "grad_norm": 0.7698647975921631,
+ "learning_rate": 0.0001655148763204977,
+ "loss": 1.6174,
+ "step": 817
+ },
+ {
+ "epoch": 0.8182301272232816,
+ "grad_norm": 0.7975410223007202,
+ "learning_rate": 0.00016543564538107797,
+ "loss": 1.7924,
+ "step": 818
+ },
+ {
+ "epoch": 0.8192304085524054,
+ "grad_norm": 0.9687625169754028,
+ "learning_rate": 0.00016535634253971794,
+ "loss": 1.7725,
+ "step": 819
+ },
+ {
+ "epoch": 0.8202306898815291,
+ "grad_norm": 0.6777274012565613,
+ "learning_rate": 0.00016527696788355714,
+ "loss": 1.5018,
+ "step": 820
+ },
+ {
+ "epoch": 0.821230971210653,
+ "grad_norm": 0.6990464329719543,
+ "learning_rate": 0.00016519752149981397,
+ "loss": 1.5804,
+ "step": 821
+ },
+ {
+ "epoch": 0.8222312525397768,
+ "grad_norm": 0.8445940613746643,
+ "learning_rate": 0.0001651180034757856,
+ "loss": 1.8591,
+ "step": 822
+ },
+ {
+ "epoch": 0.8232315338689006,
+ "grad_norm": 0.8462644815444946,
+ "learning_rate": 0.00016503841389884798,
+ "loss": 1.7582,
+ "step": 823
+ },
+ {
+ "epoch": 0.8242318151980245,
+ "grad_norm": 0.7679311037063599,
+ "learning_rate": 0.00016495875285645566,
+ "loss": 1.5971,
+ "step": 824
+ },
+ {
+ "epoch": 0.8252320965271482,
+ "grad_norm": 0.7734447717666626,
+ "learning_rate": 0.00016487902043614173,
+ "loss": 1.714,
+ "step": 825
+ },
+ {
+ "epoch": 0.8262323778562721,
+ "grad_norm": 0.7890239953994751,
+ "learning_rate": 0.0001647992167255177,
+ "loss": 1.6876,
+ "step": 826
+ },
+ {
+ "epoch": 0.8272326591853959,
+ "grad_norm": 0.8530203104019165,
+ "learning_rate": 0.0001647193418122734,
+ "loss": 1.9096,
+ "step": 827
+ },
+ {
+ "epoch": 0.8282329405145197,
+ "grad_norm": 0.7828260064125061,
+ "learning_rate": 0.00016463939578417692,
+ "loss": 1.5518,
+ "step": 828
+ },
+ {
+ "epoch": 0.8292332218436436,
+ "grad_norm": 0.7015512585639954,
+ "learning_rate": 0.0001645593787290745,
+ "loss": 1.49,
+ "step": 829
+ },
+ {
+ "epoch": 0.8302335031727673,
+ "grad_norm": 0.694771409034729,
+ "learning_rate": 0.0001644792907348904,
+ "loss": 1.5506,
+ "step": 830
+ },
+ {
+ "epoch": 0.8312337845018911,
+ "grad_norm": 0.8167857527732849,
+ "learning_rate": 0.00016439913188962685,
+ "loss": 1.7798,
+ "step": 831
+ },
+ {
+ "epoch": 0.832234065831015,
+ "grad_norm": 0.6682108044624329,
+ "learning_rate": 0.0001643189022813639,
+ "loss": 1.6107,
+ "step": 832
+ },
+ {
+ "epoch": 0.8332343471601388,
+ "grad_norm": 0.8347259163856506,
+ "learning_rate": 0.0001642386019982594,
+ "loss": 1.7672,
+ "step": 833
+ },
+ {
+ "epoch": 0.8342346284892626,
+ "grad_norm": 0.6620945334434509,
+ "learning_rate": 0.00016415823112854883,
+ "loss": 1.6975,
+ "step": 834
+ },
+ {
+ "epoch": 0.8352349098183864,
+ "grad_norm": 0.7286327481269836,
+ "learning_rate": 0.00016407778976054526,
+ "loss": 1.5956,
+ "step": 835
+ },
+ {
+ "epoch": 0.8362351911475102,
+ "grad_norm": 0.6344440579414368,
+ "learning_rate": 0.0001639972779826392,
+ "loss": 1.6455,
+ "step": 836
+ },
+ {
+ "epoch": 0.8372354724766341,
+ "grad_norm": 0.6607793569564819,
+ "learning_rate": 0.0001639166958832985,
+ "loss": 1.6739,
+ "step": 837
+ },
+ {
+ "epoch": 0.8382357538057579,
+ "grad_norm": 0.6973574161529541,
+ "learning_rate": 0.00016383604355106837,
+ "loss": 1.8042,
+ "step": 838
+ },
+ {
+ "epoch": 0.8392360351348817,
+ "grad_norm": 0.7744210958480835,
+ "learning_rate": 0.00016375532107457108,
+ "loss": 1.528,
+ "step": 839
+ },
+ {
+ "epoch": 0.8402363164640055,
+ "grad_norm": 0.6944973468780518,
+ "learning_rate": 0.00016367452854250603,
+ "loss": 1.5498,
+ "step": 840
+ },
+ {
+ "epoch": 0.8412365977931293,
+ "grad_norm": 0.6730696558952332,
+ "learning_rate": 0.00016359366604364972,
+ "loss": 1.5849,
+ "step": 841
+ },
+ {
+ "epoch": 0.8422368791222531,
+ "grad_norm": 0.7051465511322021,
+ "learning_rate": 0.00016351273366685526,
+ "loss": 1.5972,
+ "step": 842
+ },
+ {
+ "epoch": 0.843237160451377,
+ "grad_norm": 0.7309426069259644,
+ "learning_rate": 0.00016343173150105278,
+ "loss": 1.4612,
+ "step": 843
+ },
+ {
+ "epoch": 0.8442374417805008,
+ "grad_norm": 0.7830431461334229,
+ "learning_rate": 0.00016335065963524897,
+ "loss": 1.7208,
+ "step": 844
+ },
+ {
+ "epoch": 0.8452377231096245,
+ "grad_norm": 0.8609834909439087,
+ "learning_rate": 0.0001632695181585272,
+ "loss": 1.8229,
+ "step": 845
+ },
+ {
+ "epoch": 0.8462380044387484,
+ "grad_norm": 0.7489060759544373,
+ "learning_rate": 0.00016318830716004722,
+ "loss": 1.6955,
+ "step": 846
+ },
+ {
+ "epoch": 0.8472382857678722,
+ "grad_norm": 0.636900782585144,
+ "learning_rate": 0.00016310702672904528,
+ "loss": 1.6664,
+ "step": 847
+ },
+ {
+ "epoch": 0.8482385670969961,
+ "grad_norm": 0.6423529386520386,
+ "learning_rate": 0.00016302567695483382,
+ "loss": 1.5356,
+ "step": 848
+ },
+ {
+ "epoch": 0.8492388484261199,
+ "grad_norm": 0.7380033731460571,
+ "learning_rate": 0.0001629442579268016,
+ "loss": 1.4482,
+ "step": 849
+ },
+ {
+ "epoch": 0.8502391297552436,
+ "grad_norm": 0.8258544206619263,
+ "learning_rate": 0.00016286276973441333,
+ "loss": 1.7058,
+ "step": 850
+ },
+ {
+ "epoch": 0.8512394110843675,
+ "grad_norm": 0.6473391056060791,
+ "learning_rate": 0.00016278121246720987,
+ "loss": 1.5374,
+ "step": 851
+ },
+ {
+ "epoch": 0.8522396924134913,
+ "grad_norm": 0.7097072005271912,
+ "learning_rate": 0.00016269958621480788,
+ "loss": 1.6786,
+ "step": 852
+ },
+ {
+ "epoch": 0.8532399737426151,
+ "grad_norm": 0.724993884563446,
+ "learning_rate": 0.0001626178910668998,
+ "loss": 1.6022,
+ "step": 853
+ },
+ {
+ "epoch": 0.854240255071739,
+ "grad_norm": 0.6800474524497986,
+ "learning_rate": 0.00016253612711325386,
+ "loss": 1.6382,
+ "step": 854
+ },
+ {
+ "epoch": 0.8552405364008627,
+ "grad_norm": 0.6339759826660156,
+ "learning_rate": 0.0001624542944437139,
+ "loss": 1.5641,
+ "step": 855
+ },
+ {
+ "epoch": 0.8562408177299866,
+ "grad_norm": 0.6792349219322205,
+ "learning_rate": 0.00016237239314819917,
+ "loss": 1.3713,
+ "step": 856
+ },
+ {
+ "epoch": 0.8572410990591104,
+ "grad_norm": 0.6544696688652039,
+ "learning_rate": 0.0001622904233167044,
+ "loss": 1.5639,
+ "step": 857
+ },
+ {
+ "epoch": 0.8582413803882342,
+ "grad_norm": 0.7736073732376099,
+ "learning_rate": 0.0001622083850392996,
+ "loss": 1.5454,
+ "step": 858
+ },
+ {
+ "epoch": 0.859241661717358,
+ "grad_norm": 0.8642422556877136,
+ "learning_rate": 0.00016212627840613003,
+ "loss": 1.6852,
+ "step": 859
+ },
+ {
+ "epoch": 0.8602419430464818,
+ "grad_norm": 0.6520773768424988,
+ "learning_rate": 0.000162044103507416,
+ "loss": 1.5335,
+ "step": 860
+ },
+ {
+ "epoch": 0.8612422243756056,
+ "grad_norm": 0.7647336721420288,
+ "learning_rate": 0.00016196186043345288,
+ "loss": 1.5578,
+ "step": 861
+ },
+ {
+ "epoch": 0.8622425057047295,
+ "grad_norm": 0.9621163010597229,
+ "learning_rate": 0.00016187954927461093,
+ "loss": 1.6976,
+ "step": 862
+ },
+ {
+ "epoch": 0.8632427870338533,
+ "grad_norm": 0.6847056746482849,
+ "learning_rate": 0.00016179717012133521,
+ "loss": 1.7118,
+ "step": 863
+ },
+ {
+ "epoch": 0.864243068362977,
+ "grad_norm": 0.7482467889785767,
+ "learning_rate": 0.00016171472306414554,
+ "loss": 1.6601,
+ "step": 864
+ },
+ {
+ "epoch": 0.8652433496921009,
+ "grad_norm": 0.7760444283485413,
+ "learning_rate": 0.00016163220819363628,
+ "loss": 1.5587,
+ "step": 865
+ },
+ {
+ "epoch": 0.8662436310212247,
+ "grad_norm": 0.8380980491638184,
+ "learning_rate": 0.00016154962560047643,
+ "loss": 1.7171,
+ "step": 866
+ },
+ {
+ "epoch": 0.8672439123503486,
+ "grad_norm": 0.6927618384361267,
+ "learning_rate": 0.00016146697537540924,
+ "loss": 1.7244,
+ "step": 867
+ },
+ {
+ "epoch": 0.8682441936794724,
+ "grad_norm": 0.7855746746063232,
+ "learning_rate": 0.0001613842576092524,
+ "loss": 1.5848,
+ "step": 868
+ },
+ {
+ "epoch": 0.8692444750085961,
+ "grad_norm": 0.6743006110191345,
+ "learning_rate": 0.00016130147239289778,
+ "loss": 1.6969,
+ "step": 869
+ },
+ {
+ "epoch": 0.87024475633772,
+ "grad_norm": 0.7060980200767517,
+ "learning_rate": 0.00016121861981731135,
+ "loss": 1.5632,
+ "step": 870
+ },
+ {
+ "epoch": 0.8712450376668438,
+ "grad_norm": 0.7673144340515137,
+ "learning_rate": 0.00016113569997353312,
+ "loss": 1.5687,
+ "step": 871
+ },
+ {
+ "epoch": 0.8722453189959676,
+ "grad_norm": 0.8105847239494324,
+ "learning_rate": 0.000161052712952677,
+ "loss": 1.6074,
+ "step": 872
+ },
+ {
+ "epoch": 0.8732456003250915,
+ "grad_norm": 0.6536850333213806,
+ "learning_rate": 0.0001609696588459307,
+ "loss": 1.5842,
+ "step": 873
+ },
+ {
+ "epoch": 0.8742458816542152,
+ "grad_norm": 0.6653574705123901,
+ "learning_rate": 0.00016088653774455568,
+ "loss": 1.4652,
+ "step": 874
+ },
+ {
+ "epoch": 0.875246162983339,
+ "grad_norm": 0.7202721238136292,
+ "learning_rate": 0.00016080334973988695,
+ "loss": 1.5212,
+ "step": 875
+ },
+ {
+ "epoch": 0.8762464443124629,
+ "grad_norm": 0.8218807578086853,
+ "learning_rate": 0.00016072009492333318,
+ "loss": 1.803,
+ "step": 876
+ },
+ {
+ "epoch": 0.8772467256415867,
+ "grad_norm": 0.6170400381088257,
+ "learning_rate": 0.0001606367733863763,
+ "loss": 1.5313,
+ "step": 877
+ },
+ {
+ "epoch": 0.8782470069707106,
+ "grad_norm": 0.6750448346138,
+ "learning_rate": 0.00016055338522057158,
+ "loss": 1.6183,
+ "step": 878
+ },
+ {
+ "epoch": 0.8792472882998343,
+ "grad_norm": 0.6602128148078918,
+ "learning_rate": 0.00016046993051754756,
+ "loss": 1.6669,
+ "step": 879
+ },
+ {
+ "epoch": 0.8802475696289581,
+ "grad_norm": 0.7064031958580017,
+ "learning_rate": 0.00016038640936900586,
+ "loss": 1.7458,
+ "step": 880
+ },
+ {
+ "epoch": 0.881247850958082,
+ "grad_norm": 0.5916783809661865,
+ "learning_rate": 0.00016030282186672116,
+ "loss": 1.4966,
+ "step": 881
+ },
+ {
+ "epoch": 0.8822481322872058,
+ "grad_norm": 0.7189202904701233,
+ "learning_rate": 0.00016021916810254097,
+ "loss": 1.5812,
+ "step": 882
+ },
+ {
+ "epoch": 0.8832484136163296,
+ "grad_norm": 0.7760966420173645,
+ "learning_rate": 0.00016013544816838565,
+ "loss": 1.6709,
+ "step": 883
+ },
+ {
+ "epoch": 0.8842486949454534,
+ "grad_norm": 0.6894650459289551,
+ "learning_rate": 0.00016005166215624827,
+ "loss": 1.6255,
+ "step": 884
+ },
+ {
+ "epoch": 0.8852489762745772,
+ "grad_norm": 0.6777058839797974,
+ "learning_rate": 0.0001599678101581945,
+ "loss": 1.7479,
+ "step": 885
+ },
+ {
+ "epoch": 0.886249257603701,
+ "grad_norm": 0.7056024670600891,
+ "learning_rate": 0.00015988389226636253,
+ "loss": 1.7896,
+ "step": 886
+ },
+ {
+ "epoch": 0.8872495389328249,
+ "grad_norm": 0.6465604305267334,
+ "learning_rate": 0.00015979990857296295,
+ "loss": 1.7363,
+ "step": 887
+ },
+ {
+ "epoch": 0.8882498202619487,
+ "grad_norm": 0.6703017950057983,
+ "learning_rate": 0.00015971585917027862,
+ "loss": 1.6617,
+ "step": 888
+ },
+ {
+ "epoch": 0.8892501015910725,
+ "grad_norm": 0.7116142511367798,
+ "learning_rate": 0.00015963174415066468,
+ "loss": 1.8232,
+ "step": 889
+ },
+ {
+ "epoch": 0.8902503829201963,
+ "grad_norm": 0.7552229762077332,
+ "learning_rate": 0.0001595475636065483,
+ "loss": 1.7847,
+ "step": 890
+ },
+ {
+ "epoch": 0.8912506642493201,
+ "grad_norm": 0.70728999376297,
+ "learning_rate": 0.00015946331763042867,
+ "loss": 1.5665,
+ "step": 891
+ },
+ {
+ "epoch": 0.892250945578444,
+ "grad_norm": 0.6701356768608093,
+ "learning_rate": 0.00015937900631487686,
+ "loss": 1.3572,
+ "step": 892
+ },
+ {
+ "epoch": 0.8932512269075678,
+ "grad_norm": 0.6960388422012329,
+ "learning_rate": 0.00015929462975253585,
+ "loss": 1.5815,
+ "step": 893
+ },
+ {
+ "epoch": 0.8942515082366915,
+ "grad_norm": 0.6505674719810486,
+ "learning_rate": 0.00015921018803612014,
+ "loss": 1.7499,
+ "step": 894
+ },
+ {
+ "epoch": 0.8952517895658154,
+ "grad_norm": 0.604205310344696,
+ "learning_rate": 0.0001591256812584159,
+ "loss": 1.6838,
+ "step": 895
+ },
+ {
+ "epoch": 0.8962520708949392,
+ "grad_norm": 0.5875198841094971,
+ "learning_rate": 0.00015904110951228082,
+ "loss": 1.5147,
+ "step": 896
+ },
+ {
+ "epoch": 0.897252352224063,
+ "grad_norm": 0.6970433592796326,
+ "learning_rate": 0.00015895647289064396,
+ "loss": 1.7767,
+ "step": 897
+ },
+ {
+ "epoch": 0.8982526335531869,
+ "grad_norm": 0.7364515066146851,
+ "learning_rate": 0.00015887177148650564,
+ "loss": 1.6672,
+ "step": 898
+ },
+ {
+ "epoch": 0.8992529148823106,
+ "grad_norm": 0.7843589186668396,
+ "learning_rate": 0.0001587870053929374,
+ "loss": 1.689,
+ "step": 899
+ },
+ {
+ "epoch": 0.9002531962114345,
+ "grad_norm": 0.6405196189880371,
+ "learning_rate": 0.00015870217470308188,
+ "loss": 1.5917,
+ "step": 900
+ },
+ {
+ "epoch": 0.9012534775405583,
+ "grad_norm": 0.7019757628440857,
+ "learning_rate": 0.0001586172795101526,
+ "loss": 1.5497,
+ "step": 901
+ },
+ {
+ "epoch": 0.9022537588696821,
+ "grad_norm": 0.8048270344734192,
+ "learning_rate": 0.00015853231990743406,
+ "loss": 1.5821,
+ "step": 902
+ },
+ {
+ "epoch": 0.903254040198806,
+ "grad_norm": 0.6245777606964111,
+ "learning_rate": 0.0001584472959882815,
+ "loss": 1.5688,
+ "step": 903
+ },
+ {
+ "epoch": 0.9042543215279297,
+ "grad_norm": 0.6584132313728333,
+ "learning_rate": 0.00015836220784612085,
+ "loss": 1.4555,
+ "step": 904
+ },
+ {
+ "epoch": 0.9052546028570535,
+ "grad_norm": 0.7710773944854736,
+ "learning_rate": 0.00015827705557444852,
+ "loss": 1.6416,
+ "step": 905
+ },
+ {
+ "epoch": 0.9062548841861774,
+ "grad_norm": 0.6738126277923584,
+ "learning_rate": 0.00015819183926683153,
+ "loss": 1.6272,
+ "step": 906
+ },
+ {
+ "epoch": 0.9072551655153012,
+ "grad_norm": 0.6698735356330872,
+ "learning_rate": 0.00015810655901690715,
+ "loss": 1.4778,
+ "step": 907
+ },
+ {
+ "epoch": 0.9082554468444249,
+ "grad_norm": 1.0088928937911987,
+ "learning_rate": 0.00015802121491838297,
+ "loss": 1.6854,
+ "step": 908
+ },
+ {
+ "epoch": 0.9092557281735488,
+ "grad_norm": 0.6948708891868591,
+ "learning_rate": 0.0001579358070650367,
+ "loss": 1.5673,
+ "step": 909
+ },
+ {
+ "epoch": 0.9102560095026726,
+ "grad_norm": 0.6728948950767517,
+ "learning_rate": 0.00015785033555071616,
+ "loss": 1.6646,
+ "step": 910
+ },
+ {
+ "epoch": 0.9112562908317965,
+ "grad_norm": 0.8096952438354492,
+ "learning_rate": 0.00015776480046933905,
+ "loss": 1.4675,
+ "step": 911
+ },
+ {
+ "epoch": 0.9122565721609203,
+ "grad_norm": 0.6625403761863708,
+ "learning_rate": 0.000157679201914893,
+ "loss": 1.4793,
+ "step": 912
+ },
+ {
+ "epoch": 0.913256853490044,
+ "grad_norm": 0.7129424810409546,
+ "learning_rate": 0.00015759353998143528,
+ "loss": 1.574,
+ "step": 913
+ },
+ {
+ "epoch": 0.9142571348191679,
+ "grad_norm": 0.6151349544525146,
+ "learning_rate": 0.00015750781476309288,
+ "loss": 1.5631,
+ "step": 914
+ },
+ {
+ "epoch": 0.9152574161482917,
+ "grad_norm": 0.7185074687004089,
+ "learning_rate": 0.00015742202635406235,
+ "loss": 1.8382,
+ "step": 915
+ },
+ {
+ "epoch": 0.9162576974774155,
+ "grad_norm": 0.7076066732406616,
+ "learning_rate": 0.00015733617484860963,
+ "loss": 1.5394,
+ "step": 916
+ },
+ {
+ "epoch": 0.9172579788065394,
+ "grad_norm": 0.7286276817321777,
+ "learning_rate": 0.00015725026034106996,
+ "loss": 1.8139,
+ "step": 917
+ },
+ {
+ "epoch": 0.9182582601356631,
+ "grad_norm": 0.757075846195221,
+ "learning_rate": 0.00015716428292584787,
+ "loss": 1.6768,
+ "step": 918
+ },
+ {
+ "epoch": 0.919258541464787,
+ "grad_norm": 0.6926739811897278,
+ "learning_rate": 0.00015707824269741702,
+ "loss": 1.4541,
+ "step": 919
+ },
+ {
+ "epoch": 0.9202588227939108,
+ "grad_norm": 0.6489847898483276,
+ "learning_rate": 0.00015699213975031996,
+ "loss": 1.4725,
+ "step": 920
+ },
+ {
+ "epoch": 0.9212591041230346,
+ "grad_norm": 0.7668707966804504,
+ "learning_rate": 0.0001569059741791684,
+ "loss": 1.4239,
+ "step": 921
+ },
+ {
+ "epoch": 0.9222593854521585,
+ "grad_norm": 0.736863911151886,
+ "learning_rate": 0.0001568197460786426,
+ "loss": 1.6117,
+ "step": 922
+ },
+ {
+ "epoch": 0.9232596667812822,
+ "grad_norm": 0.8462884426116943,
+ "learning_rate": 0.0001567334555434917,
+ "loss": 1.5025,
+ "step": 923
+ },
+ {
+ "epoch": 0.924259948110406,
+ "grad_norm": 0.7481950521469116,
+ "learning_rate": 0.0001566471026685334,
+ "loss": 1.5024,
+ "step": 924
+ },
+ {
+ "epoch": 0.9252602294395299,
+ "grad_norm": 0.6457516551017761,
+ "learning_rate": 0.00015656068754865387,
+ "loss": 1.4526,
+ "step": 925
+ },
+ {
+ "epoch": 0.9262605107686537,
+ "grad_norm": 0.809140682220459,
+ "learning_rate": 0.00015647421027880772,
+ "loss": 1.4449,
+ "step": 926
+ },
+ {
+ "epoch": 0.9272607920977775,
+ "grad_norm": 0.6967790126800537,
+ "learning_rate": 0.0001563876709540178,
+ "loss": 1.5552,
+ "step": 927
+ },
+ {
+ "epoch": 0.9282610734269013,
+ "grad_norm": 0.6858595609664917,
+ "learning_rate": 0.0001563010696693752,
+ "loss": 1.6202,
+ "step": 928
+ },
+ {
+ "epoch": 0.9292613547560251,
+ "grad_norm": 0.7033559679985046,
+ "learning_rate": 0.00015621440652003907,
+ "loss": 1.7186,
+ "step": 929
+ },
+ {
+ "epoch": 0.930261636085149,
+ "grad_norm": 0.6527283787727356,
+ "learning_rate": 0.00015612768160123652,
+ "loss": 1.5028,
+ "step": 930
+ },
+ {
+ "epoch": 0.9312619174142728,
+ "grad_norm": 0.7243602275848389,
+ "learning_rate": 0.00015604089500826257,
+ "loss": 1.6729,
+ "step": 931
+ },
+ {
+ "epoch": 0.9322621987433966,
+ "grad_norm": 0.6734297275543213,
+ "learning_rate": 0.00015595404683648,
+ "loss": 1.4731,
+ "step": 932
+ },
+ {
+ "epoch": 0.9332624800725204,
+ "grad_norm": 0.7641247510910034,
+ "learning_rate": 0.00015586713718131922,
+ "loss": 1.5851,
+ "step": 933
+ },
+ {
+ "epoch": 0.9342627614016442,
+ "grad_norm": 0.7062788009643555,
+ "learning_rate": 0.0001557801661382782,
+ "loss": 1.5735,
+ "step": 934
+ },
+ {
+ "epoch": 0.935263042730768,
+ "grad_norm": 0.6413556337356567,
+ "learning_rate": 0.00015569313380292248,
+ "loss": 1.5854,
+ "step": 935
+ },
+ {
+ "epoch": 0.9362633240598919,
+ "grad_norm": 0.645720362663269,
+ "learning_rate": 0.00015560604027088477,
+ "loss": 1.5072,
+ "step": 936
+ },
+ {
+ "epoch": 0.9372636053890157,
+ "grad_norm": 0.6726225018501282,
+ "learning_rate": 0.00015551888563786515,
+ "loss": 1.587,
+ "step": 937
+ },
+ {
+ "epoch": 0.9382638867181394,
+ "grad_norm": 0.7043680548667908,
+ "learning_rate": 0.00015543166999963076,
+ "loss": 1.6577,
+ "step": 938
+ },
+ {
+ "epoch": 0.9392641680472633,
+ "grad_norm": 0.7049617767333984,
+ "learning_rate": 0.0001553443934520159,
+ "loss": 1.7624,
+ "step": 939
+ },
+ {
+ "epoch": 0.9402644493763871,
+ "grad_norm": 0.7060776352882385,
+ "learning_rate": 0.00015525705609092157,
+ "loss": 1.6208,
+ "step": 940
+ },
+ {
+ "epoch": 0.941264730705511,
+ "grad_norm": 0.6215025186538696,
+ "learning_rate": 0.00015516965801231586,
+ "loss": 1.4645,
+ "step": 941
+ },
+ {
+ "epoch": 0.9422650120346348,
+ "grad_norm": 0.7021099328994751,
+ "learning_rate": 0.0001550821993122334,
+ "loss": 1.566,
+ "step": 942
+ },
+ {
+ "epoch": 0.9432652933637585,
+ "grad_norm": 0.6451042294502258,
+ "learning_rate": 0.0001549946800867755,
+ "loss": 1.7491,
+ "step": 943
+ },
+ {
+ "epoch": 0.9442655746928824,
+ "grad_norm": 0.7288572192192078,
+ "learning_rate": 0.00015490710043210997,
+ "loss": 1.6302,
+ "step": 944
+ },
+ {
+ "epoch": 0.9452658560220062,
+ "grad_norm": 0.7850833535194397,
+ "learning_rate": 0.00015481946044447099,
+ "loss": 1.5673,
+ "step": 945
+ },
+ {
+ "epoch": 0.94626613735113,
+ "grad_norm": 0.7459181547164917,
+ "learning_rate": 0.00015473176022015906,
+ "loss": 1.4529,
+ "step": 946
+ },
+ {
+ "epoch": 0.9472664186802539,
+ "grad_norm": 0.7002627849578857,
+ "learning_rate": 0.0001546439998555409,
+ "loss": 1.8814,
+ "step": 947
+ },
+ {
+ "epoch": 0.9482667000093776,
+ "grad_norm": 0.6664572358131409,
+ "learning_rate": 0.0001545561794470492,
+ "loss": 1.5337,
+ "step": 948
+ },
+ {
+ "epoch": 0.9492669813385014,
+ "grad_norm": 0.757116973400116,
+ "learning_rate": 0.00015446829909118275,
+ "loss": 1.5775,
+ "step": 949
+ },
+ {
+ "epoch": 0.9502672626676253,
+ "grad_norm": 0.7456643581390381,
+ "learning_rate": 0.00015438035888450623,
+ "loss": 1.525,
+ "step": 950
+ },
+ {
+ "epoch": 0.9512675439967491,
+ "grad_norm": 0.6722500920295715,
+ "learning_rate": 0.00015429235892364994,
+ "loss": 1.5059,
+ "step": 951
+ },
+ {
+ "epoch": 0.952267825325873,
+ "grad_norm": 0.7431210279464722,
+ "learning_rate": 0.00015420429930530996,
+ "loss": 1.6867,
+ "step": 952
+ },
+ {
+ "epoch": 0.9532681066549967,
+ "grad_norm": 0.751015305519104,
+ "learning_rate": 0.00015411618012624786,
+ "loss": 1.7371,
+ "step": 953
+ },
+ {
+ "epoch": 0.9542683879841205,
+ "grad_norm": 0.807579517364502,
+ "learning_rate": 0.00015402800148329071,
+ "loss": 1.7353,
+ "step": 954
+ },
+ {
+ "epoch": 0.9552686693132444,
+ "grad_norm": 0.608161449432373,
+ "learning_rate": 0.00015393976347333088,
+ "loss": 1.3074,
+ "step": 955
+ },
+ {
+ "epoch": 0.9562689506423682,
+ "grad_norm": 0.7092815637588501,
+ "learning_rate": 0.00015385146619332596,
+ "loss": 1.676,
+ "step": 956
+ },
+ {
+ "epoch": 0.9572692319714919,
+ "grad_norm": 0.7639429569244385,
+ "learning_rate": 0.00015376310974029873,
+ "loss": 1.6452,
+ "step": 957
+ },
+ {
+ "epoch": 0.9582695133006158,
+ "grad_norm": 0.7333659529685974,
+ "learning_rate": 0.00015367469421133695,
+ "loss": 1.6821,
+ "step": 958
+ },
+ {
+ "epoch": 0.9592697946297396,
+ "grad_norm": 0.7246838212013245,
+ "learning_rate": 0.00015358621970359325,
+ "loss": 1.5078,
+ "step": 959
+ },
+ {
+ "epoch": 0.9602700759588634,
+ "grad_norm": 0.7209622859954834,
+ "learning_rate": 0.00015349768631428519,
+ "loss": 1.5617,
+ "step": 960
+ },
+ {
+ "epoch": 0.9612703572879873,
+ "grad_norm": 0.7034916877746582,
+ "learning_rate": 0.00015340909414069488,
+ "loss": 1.4711,
+ "step": 961
+ },
+ {
+ "epoch": 0.962270638617111,
+ "grad_norm": 0.7311360239982605,
+ "learning_rate": 0.00015332044328016914,
+ "loss": 1.6488,
+ "step": 962
+ },
+ {
+ "epoch": 0.9632709199462349,
+ "grad_norm": 0.6668992638587952,
+ "learning_rate": 0.0001532317338301192,
+ "loss": 1.6804,
+ "step": 963
+ },
+ {
+ "epoch": 0.9642712012753587,
+ "grad_norm": 0.6265329122543335,
+ "learning_rate": 0.00015314296588802076,
+ "loss": 1.8169,
+ "step": 964
+ },
+ {
+ "epoch": 0.9652714826044825,
+ "grad_norm": 0.6945448517799377,
+ "learning_rate": 0.00015305413955141365,
+ "loss": 1.8041,
+ "step": 965
+ },
+ {
+ "epoch": 0.9662717639336064,
+ "grad_norm": 0.6718643307685852,
+ "learning_rate": 0.00015296525491790205,
+ "loss": 1.3486,
+ "step": 966
+ },
+ {
+ "epoch": 0.9672720452627301,
+ "grad_norm": 0.6232700943946838,
+ "learning_rate": 0.00015287631208515406,
+ "loss": 1.5672,
+ "step": 967
+ },
+ {
+ "epoch": 0.9682723265918539,
+ "grad_norm": 0.7481172680854797,
+ "learning_rate": 0.00015278731115090171,
+ "loss": 1.5992,
+ "step": 968
+ },
+ {
+ "epoch": 0.9692726079209778,
+ "grad_norm": 0.6585466861724854,
+ "learning_rate": 0.00015269825221294098,
+ "loss": 1.6403,
+ "step": 969
+ },
+ {
+ "epoch": 0.9702728892501016,
+ "grad_norm": 0.7587956786155701,
+ "learning_rate": 0.00015260913536913154,
+ "loss": 1.7991,
+ "step": 970
+ },
+ {
+ "epoch": 0.9712731705792254,
+ "grad_norm": 0.672698974609375,
+ "learning_rate": 0.00015251996071739664,
+ "loss": 1.4311,
+ "step": 971
+ },
+ {
+ "epoch": 0.9722734519083492,
+ "grad_norm": 0.7597199082374573,
+ "learning_rate": 0.00015243072835572318,
+ "loss": 1.5692,
+ "step": 972
+ },
+ {
+ "epoch": 0.973273733237473,
+ "grad_norm": 0.7342745661735535,
+ "learning_rate": 0.0001523414383821613,
+ "loss": 1.6364,
+ "step": 973
+ },
+ {
+ "epoch": 0.9742740145665969,
+ "grad_norm": 0.6640815138816833,
+ "learning_rate": 0.00015225209089482462,
+ "loss": 1.5113,
+ "step": 974
+ },
+ {
+ "epoch": 0.9752742958957207,
+ "grad_norm": 0.6298378109931946,
+ "learning_rate": 0.0001521626859918898,
+ "loss": 1.4822,
+ "step": 975
+ },
+ {
+ "epoch": 0.9762745772248445,
+ "grad_norm": 0.6862055659294128,
+ "learning_rate": 0.00015207322377159668,
+ "loss": 1.6159,
+ "step": 976
+ },
+ {
+ "epoch": 0.9772748585539683,
+ "grad_norm": 0.6377236843109131,
+ "learning_rate": 0.00015198370433224805,
+ "loss": 1.6046,
+ "step": 977
+ },
+ {
+ "epoch": 0.9782751398830921,
+ "grad_norm": 0.620070219039917,
+ "learning_rate": 0.00015189412777220958,
+ "loss": 1.589,
+ "step": 978
+ },
+ {
+ "epoch": 0.9792754212122159,
+ "grad_norm": 0.7776119112968445,
+ "learning_rate": 0.00015180449418990976,
+ "loss": 1.485,
+ "step": 979
+ },
+ {
+ "epoch": 0.9802757025413398,
+ "grad_norm": 0.8258413076400757,
+ "learning_rate": 0.00015171480368383964,
+ "loss": 1.5615,
+ "step": 980
+ },
+ {
+ "epoch": 0.9812759838704636,
+ "grad_norm": 0.7297958135604858,
+ "learning_rate": 0.00015162505635255287,
+ "loss": 1.5408,
+ "step": 981
+ },
+ {
+ "epoch": 0.9822762651995874,
+ "grad_norm": 0.5848103165626526,
+ "learning_rate": 0.00015153525229466555,
+ "loss": 1.6821,
+ "step": 982
+ },
+ {
+ "epoch": 0.9832765465287112,
+ "grad_norm": 0.7375655174255371,
+ "learning_rate": 0.00015144539160885613,
+ "loss": 1.7568,
+ "step": 983
+ },
+ {
+ "epoch": 0.984276827857835,
+ "grad_norm": 0.7466885447502136,
+ "learning_rate": 0.00015135547439386516,
+ "loss": 1.5805,
+ "step": 984
+ },
+ {
+ "epoch": 0.9852771091869589,
+ "grad_norm": 0.6645593047142029,
+ "learning_rate": 0.0001512655007484955,
+ "loss": 1.6776,
+ "step": 985
+ },
+ {
+ "epoch": 0.9862773905160827,
+ "grad_norm": 0.7973874807357788,
+ "learning_rate": 0.00015117547077161185,
+ "loss": 1.4931,
+ "step": 986
+ },
+ {
+ "epoch": 0.9872776718452064,
+ "grad_norm": 0.685391902923584,
+ "learning_rate": 0.0001510853845621409,
+ "loss": 1.6254,
+ "step": 987
+ },
+ {
+ "epoch": 0.9882779531743303,
+ "grad_norm": 0.6562414765357971,
+ "learning_rate": 0.00015099524221907107,
+ "loss": 1.6677,
+ "step": 988
+ },
+ {
+ "epoch": 0.9892782345034541,
+ "grad_norm": 0.6216359734535217,
+ "learning_rate": 0.0001509050438414525,
+ "loss": 1.6107,
+ "step": 989
+ },
+ {
+ "epoch": 0.9902785158325779,
+ "grad_norm": 0.7108810544013977,
+ "learning_rate": 0.00015081478952839693,
+ "loss": 1.5268,
+ "step": 990
+ },
+ {
+ "epoch": 0.9912787971617018,
+ "grad_norm": 0.7076026797294617,
+ "learning_rate": 0.00015072447937907753,
+ "loss": 1.3716,
+ "step": 991
+ },
+ {
+ "epoch": 0.9922790784908255,
+ "grad_norm": 0.6056272983551025,
+ "learning_rate": 0.00015063411349272877,
+ "loss": 1.4931,
+ "step": 992
+ },
+ {
+ "epoch": 0.9932793598199494,
+ "grad_norm": 0.726671576499939,
+ "learning_rate": 0.00015054369196864644,
+ "loss": 1.6409,
+ "step": 993
+ },
+ {
+ "epoch": 0.9942796411490732,
+ "grad_norm": 0.7019214630126953,
+ "learning_rate": 0.00015045321490618748,
+ "loss": 1.4476,
+ "step": 994
+ },
+ {
+ "epoch": 0.995279922478197,
+ "grad_norm": 0.755043625831604,
+ "learning_rate": 0.00015036268240476978,
+ "loss": 1.6674,
+ "step": 995
+ },
+ {
+ "epoch": 0.9962802038073209,
+ "grad_norm": 0.7450313568115234,
+ "learning_rate": 0.00015027209456387218,
+ "loss": 1.3706,
+ "step": 996
+ },
+ {
+ "epoch": 0.9972804851364446,
+ "grad_norm": 0.6804680228233337,
+ "learning_rate": 0.00015018145148303438,
+ "loss": 1.3878,
+ "step": 997
+ },
+ {
+ "epoch": 0.9982807664655684,
+ "grad_norm": 0.7353954315185547,
+ "learning_rate": 0.00015009075326185667,
+ "loss": 1.8656,
+ "step": 998
+ },
+ {
+ "epoch": 0.9992810477946923,
+ "grad_norm": 0.7213340401649475,
+ "learning_rate": 0.00015000000000000001,
+ "loss": 1.6031,
+ "step": 999
+ },
+ {
+ "epoch": 1.0002813291238162,
+ "grad_norm": 0.7066403031349182,
+ "learning_rate": 0.00014990919179718584,
+ "loss": 1.3663,
+ "step": 1000
+ }
+ ],
+ "logging_steps": 1,
+ "max_steps": 2997,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 3,
+ "save_steps": 500,
+ "stateful_callbacks": {
+ "TrainerControl": {
+ "args": {
+ "should_epoch_stop": false,
+ "should_evaluate": false,
+ "should_log": false,
+ "should_save": true,
+ "should_training_stop": false
+ },
+ "attributes": {}
+ }
+ },
+ "total_flos": 8104306689589248.0,
+ "train_batch_size": 2,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/checkpoint-1000/training_args.bin b/checkpoint-1000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..6831a6d623a8a2b84942bb5584c6aa5bc14eee51
--- /dev/null
+++ b/checkpoint-1000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5db131d6e82df60077bab037ec35113e1b0836a0bd72bb0a21e3fc0311a527de
+size 5304
diff --git a/checkpoint-1500/config.json b/checkpoint-1500/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..49d4bd1e1961ef7daf9af9a7dbe539789d58d949
--- /dev/null
+++ b/checkpoint-1500/config.json
@@ -0,0 +1,35 @@
+{
+ "_name_or_path": "facebook/nllb-200-distilled-600M",
+ "activation_dropout": 0.0,
+ "activation_function": "relu",
+ "architectures": [
+ "M2M100ForConditionalGeneration"
+ ],
+ "attention_dropout": 0.1,
+ "bos_token_id": 0,
+ "d_model": 1024,
+ "decoder_attention_heads": 16,
+ "decoder_ffn_dim": 4096,
+ "decoder_layerdrop": 0,
+ "decoder_layers": 12,
+ "decoder_start_token_id": 2,
+ "dropout": 0.1,
+ "encoder_attention_heads": 16,
+ "encoder_ffn_dim": 4096,
+ "encoder_layerdrop": 0,
+ "encoder_layers": 12,
+ "eos_token_id": 2,
+ "init_std": 0.02,
+ "is_encoder_decoder": true,
+ "max_length": 200,
+ "max_position_embeddings": 1024,
+ "model_type": "m2m_100",
+ "num_hidden_layers": 12,
+ "pad_token_id": 1,
+ "scale_embedding": true,
+ "tokenizer_class": "NllbTokenizer",
+ "torch_dtype": "float32",
+ "transformers_version": "4.43.1",
+ "use_cache": true,
+ "vocab_size": 256206
+}
diff --git a/checkpoint-1500/generation_config.json b/checkpoint-1500/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..680d3e0504023804deeb427766576194a0f17d47
--- /dev/null
+++ b/checkpoint-1500/generation_config.json
@@ -0,0 +1,9 @@
+{
+ "_from_model_config": true,
+ "bos_token_id": 0,
+ "decoder_start_token_id": 2,
+ "eos_token_id": 2,
+ "max_length": 200,
+ "pad_token_id": 1,
+ "transformers_version": "4.43.1"
+}
diff --git a/checkpoint-1500/model.safetensors b/checkpoint-1500/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..320823fedd8721e67c858d58b26cf7ed46974ed4
--- /dev/null
+++ b/checkpoint-1500/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:26ea625fa483c42a74caf79d271d68d40f64b0833a8709c45f1859f58719b750
+size 2460354912
diff --git a/checkpoint-1500/optimizer.pt b/checkpoint-1500/optimizer.pt
new file mode 100644
index 0000000000000000000000000000000000000000..043745a57ca992544a24b4a13223e05233a1ed0c
--- /dev/null
+++ b/checkpoint-1500/optimizer.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1b807bbe66b30bf0c0bf03ddbd99393efc266dc7d305ae507d3ede92275eabf9
+size 5125261
diff --git a/checkpoint-1500/rng_state.pth b/checkpoint-1500/rng_state.pth
new file mode 100644
index 0000000000000000000000000000000000000000..7880eec41ddee03eae312ac15d9efec8df4523c5
--- /dev/null
+++ b/checkpoint-1500/rng_state.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c13c58e7b9fba91c8e7f5a32605f7bd9bd562ac67796cb00bdecf17567dcac66
+size 14244
diff --git a/checkpoint-1500/scheduler.pt b/checkpoint-1500/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..2520c858524e632a2e79a9782d61ec95a50c147e
--- /dev/null
+++ b/checkpoint-1500/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a014688320d66d9abcc1740699cc308c75b18610ef38f5975f168109cf9e7822
+size 1064
diff --git a/checkpoint-1500/sentencepiece.bpe.model b/checkpoint-1500/sentencepiece.bpe.model
new file mode 100644
index 0000000000000000000000000000000000000000..dc2262d3e1d375b235eb71c24119c8e73f85d4ad
--- /dev/null
+++ b/checkpoint-1500/sentencepiece.bpe.model
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:14bb8dfb35c0ffdea7bc01e56cea38b9e3d5efcdcb9c251d6b40538e1aab555a
+size 4852054
diff --git a/checkpoint-1500/special_tokens_map.json b/checkpoint-1500/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..770c6f4e25faf27bbc3878b806f2ecfb88c5169e
--- /dev/null
+++ b/checkpoint-1500/special_tokens_map.json
@@ -0,0 +1,255 @@
+{
+ "additional_special_tokens": [
+ "ace_Arab",
+ "ace_Latn",
+ "acm_Arab",
+ "acq_Arab",
+ "aeb_Arab",
+ "afr_Latn",
+ "ajp_Arab",
+ "aka_Latn",
+ "amh_Ethi",
+ "apc_Arab",
+ "arb_Arab",
+ "ars_Arab",
+ "ary_Arab",
+ "arz_Arab",
+ "asm_Beng",
+ "ast_Latn",
+ "awa_Deva",
+ "ayr_Latn",
+ "azb_Arab",
+ "azj_Latn",
+ "bak_Cyrl",
+ "bam_Latn",
+ "ban_Latn",
+ "bel_Cyrl",
+ "bem_Latn",
+ "ben_Beng",
+ "bho_Deva",
+ "bjn_Arab",
+ "bjn_Latn",
+ "bod_Tibt",
+ "bos_Latn",
+ "bug_Latn",
+ "bul_Cyrl",
+ "cat_Latn",
+ "ceb_Latn",
+ "ces_Latn",
+ "cjk_Latn",
+ "ckb_Arab",
+ "crh_Latn",
+ "cym_Latn",
+ "dan_Latn",
+ "deu_Latn",
+ "dik_Latn",
+ "dyu_Latn",
+ "dzo_Tibt",
+ "ell_Grek",
+ "eng_Latn",
+ "epo_Latn",
+ "est_Latn",
+ "eus_Latn",
+ "ewe_Latn",
+ "fao_Latn",
+ "pes_Arab",
+ "fij_Latn",
+ "fin_Latn",
+ "fon_Latn",
+ "fra_Latn",
+ "fur_Latn",
+ "fuv_Latn",
+ "gla_Latn",
+ "gle_Latn",
+ "glg_Latn",
+ "grn_Latn",
+ "guj_Gujr",
+ "hat_Latn",
+ "hau_Latn",
+ "heb_Hebr",
+ "hin_Deva",
+ "hne_Deva",
+ "hrv_Latn",
+ "hun_Latn",
+ "hye_Armn",
+ "ibo_Latn",
+ "ilo_Latn",
+ "ind_Latn",
+ "isl_Latn",
+ "ita_Latn",
+ "jav_Latn",
+ "jpn_Jpan",
+ "kab_Latn",
+ "kac_Latn",
+ "kam_Latn",
+ "kan_Knda",
+ "kas_Arab",
+ "kas_Deva",
+ "kat_Geor",
+ "knc_Arab",
+ "knc_Latn",
+ "kaz_Cyrl",
+ "kbp_Latn",
+ "kea_Latn",
+ "khm_Khmr",
+ "kik_Latn",
+ "kin_Latn",
+ "kir_Cyrl",
+ "kmb_Latn",
+ "kon_Latn",
+ "kor_Hang",
+ "kmr_Latn",
+ "lao_Laoo",
+ "lvs_Latn",
+ "lij_Latn",
+ "lim_Latn",
+ "lin_Latn",
+ "lit_Latn",
+ "lmo_Latn",
+ "ltg_Latn",
+ "ltz_Latn",
+ "lua_Latn",
+ "lug_Latn",
+ "luo_Latn",
+ "lus_Latn",
+ "mag_Deva",
+ "mai_Deva",
+ "mal_Mlym",
+ "mar_Deva",
+ "min_Latn",
+ "mkd_Cyrl",
+ "plt_Latn",
+ "mlt_Latn",
+ "mni_Beng",
+ "khk_Cyrl",
+ "mos_Latn",
+ "mri_Latn",
+ "zsm_Latn",
+ "mya_Mymr",
+ "nld_Latn",
+ "nno_Latn",
+ "nob_Latn",
+ "npi_Deva",
+ "nso_Latn",
+ "nus_Latn",
+ "nya_Latn",
+ "oci_Latn",
+ "gaz_Latn",
+ "ory_Orya",
+ "pag_Latn",
+ "pan_Guru",
+ "pap_Latn",
+ "pol_Latn",
+ "por_Latn",
+ "prs_Arab",
+ "pbt_Arab",
+ "quy_Latn",
+ "ron_Latn",
+ "run_Latn",
+ "rus_Cyrl",
+ "sag_Latn",
+ "san_Deva",
+ "sat_Beng",
+ "scn_Latn",
+ "shn_Mymr",
+ "sin_Sinh",
+ "slk_Latn",
+ "slv_Latn",
+ "smo_Latn",
+ "sna_Latn",
+ "snd_Arab",
+ "som_Latn",
+ "sot_Latn",
+ "spa_Latn",
+ "als_Latn",
+ "srd_Latn",
+ "srp_Cyrl",
+ "ssw_Latn",
+ "sun_Latn",
+ "swe_Latn",
+ "swh_Latn",
+ "szl_Latn",
+ "tam_Taml",
+ "tat_Cyrl",
+ "tel_Telu",
+ "tgk_Cyrl",
+ "tgl_Latn",
+ "tha_Thai",
+ "tir_Ethi",
+ "taq_Latn",
+ "taq_Tfng",
+ "tpi_Latn",
+ "tsn_Latn",
+ "tso_Latn",
+ "tuk_Latn",
+ "tum_Latn",
+ "tur_Latn",
+ "twi_Latn",
+ "tzm_Tfng",
+ "uig_Arab",
+ "ukr_Cyrl",
+ "umb_Latn",
+ "urd_Arab",
+ "uzn_Latn",
+ "vec_Latn",
+ "vie_Latn",
+ "war_Latn",
+ "wol_Latn",
+ "xho_Latn",
+ "ydd_Hebr",
+ "yor_Latn",
+ "yue_Hant",
+ "zho_Hans",
+ "zho_Hant",
+ "zul_Latn"
+ ],
+ "bos_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "cls_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "mask_token": {
+ "content": "",
+ "lstrip": true,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "sep_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/checkpoint-1500/tokenizer.json b/checkpoint-1500/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..98050e98b98364c06d83b3f41864076220cb8408
--- /dev/null
+++ b/checkpoint-1500/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3b39b25b0763a1dd69dec54081fafcf10770d9f2538a3bd975a0c4be6d60a9c2
+size 17331294
diff --git a/checkpoint-1500/tokenizer_config.json b/checkpoint-1500/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f1424d3657c008568198b44be241646482e7e9f2
--- /dev/null
+++ b/checkpoint-1500/tokenizer_config.json
@@ -0,0 +1,1878 @@
+{
+ "added_tokens_decoder": {
+ "0": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "3": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256001": {
+ "content": "ace_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256002": {
+ "content": "ace_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256003": {
+ "content": "acm_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256004": {
+ "content": "acq_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256005": {
+ "content": "aeb_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256006": {
+ "content": "afr_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256007": {
+ "content": "ajp_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256008": {
+ "content": "aka_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256009": {
+ "content": "amh_Ethi",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256010": {
+ "content": "apc_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256011": {
+ "content": "arb_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256012": {
+ "content": "ars_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256013": {
+ "content": "ary_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256014": {
+ "content": "arz_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256015": {
+ "content": "asm_Beng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256016": {
+ "content": "ast_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256017": {
+ "content": "awa_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256018": {
+ "content": "ayr_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256019": {
+ "content": "azb_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256020": {
+ "content": "azj_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256021": {
+ "content": "bak_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256022": {
+ "content": "bam_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256023": {
+ "content": "ban_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256024": {
+ "content": "bel_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256025": {
+ "content": "bem_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256026": {
+ "content": "ben_Beng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256027": {
+ "content": "bho_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256028": {
+ "content": "bjn_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256029": {
+ "content": "bjn_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256030": {
+ "content": "bod_Tibt",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256031": {
+ "content": "bos_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256032": {
+ "content": "bug_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256033": {
+ "content": "bul_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256034": {
+ "content": "cat_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256035": {
+ "content": "ceb_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256036": {
+ "content": "ces_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256037": {
+ "content": "cjk_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256038": {
+ "content": "ckb_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256039": {
+ "content": "crh_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256040": {
+ "content": "cym_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256041": {
+ "content": "dan_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256042": {
+ "content": "deu_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256043": {
+ "content": "dik_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256044": {
+ "content": "dyu_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256045": {
+ "content": "dzo_Tibt",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256046": {
+ "content": "ell_Grek",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256047": {
+ "content": "eng_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256048": {
+ "content": "epo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256049": {
+ "content": "est_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256050": {
+ "content": "eus_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256051": {
+ "content": "ewe_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256052": {
+ "content": "fao_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256053": {
+ "content": "pes_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256054": {
+ "content": "fij_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256055": {
+ "content": "fin_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256056": {
+ "content": "fon_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256057": {
+ "content": "fra_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256058": {
+ "content": "fur_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256059": {
+ "content": "fuv_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256060": {
+ "content": "gla_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256061": {
+ "content": "gle_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256062": {
+ "content": "glg_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256063": {
+ "content": "grn_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256064": {
+ "content": "guj_Gujr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256065": {
+ "content": "hat_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256066": {
+ "content": "hau_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256067": {
+ "content": "heb_Hebr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256068": {
+ "content": "hin_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256069": {
+ "content": "hne_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256070": {
+ "content": "hrv_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256071": {
+ "content": "hun_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256072": {
+ "content": "hye_Armn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256073": {
+ "content": "ibo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256074": {
+ "content": "ilo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256075": {
+ "content": "ind_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256076": {
+ "content": "isl_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256077": {
+ "content": "ita_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256078": {
+ "content": "jav_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256079": {
+ "content": "jpn_Jpan",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256080": {
+ "content": "kab_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256081": {
+ "content": "kac_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256082": {
+ "content": "kam_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256083": {
+ "content": "kan_Knda",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256084": {
+ "content": "kas_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256085": {
+ "content": "kas_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256086": {
+ "content": "kat_Geor",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256087": {
+ "content": "knc_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256088": {
+ "content": "knc_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256089": {
+ "content": "kaz_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256090": {
+ "content": "kbp_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256091": {
+ "content": "kea_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256092": {
+ "content": "khm_Khmr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256093": {
+ "content": "kik_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256094": {
+ "content": "kin_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256095": {
+ "content": "kir_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256096": {
+ "content": "kmb_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256097": {
+ "content": "kon_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256098": {
+ "content": "kor_Hang",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256099": {
+ "content": "kmr_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256100": {
+ "content": "lao_Laoo",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256101": {
+ "content": "lvs_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256102": {
+ "content": "lij_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256103": {
+ "content": "lim_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256104": {
+ "content": "lin_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256105": {
+ "content": "lit_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256106": {
+ "content": "lmo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256107": {
+ "content": "ltg_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256108": {
+ "content": "ltz_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256109": {
+ "content": "lua_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256110": {
+ "content": "lug_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256111": {
+ "content": "luo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256112": {
+ "content": "lus_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256113": {
+ "content": "mag_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256114": {
+ "content": "mai_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256115": {
+ "content": "mal_Mlym",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256116": {
+ "content": "mar_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256117": {
+ "content": "min_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256118": {
+ "content": "mkd_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256119": {
+ "content": "plt_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256120": {
+ "content": "mlt_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256121": {
+ "content": "mni_Beng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256122": {
+ "content": "khk_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256123": {
+ "content": "mos_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256124": {
+ "content": "mri_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256125": {
+ "content": "zsm_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256126": {
+ "content": "mya_Mymr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256127": {
+ "content": "nld_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256128": {
+ "content": "nno_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256129": {
+ "content": "nob_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256130": {
+ "content": "npi_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256131": {
+ "content": "nso_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256132": {
+ "content": "nus_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256133": {
+ "content": "nya_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256134": {
+ "content": "oci_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256135": {
+ "content": "gaz_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256136": {
+ "content": "ory_Orya",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256137": {
+ "content": "pag_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256138": {
+ "content": "pan_Guru",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256139": {
+ "content": "pap_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256140": {
+ "content": "pol_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256141": {
+ "content": "por_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256142": {
+ "content": "prs_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256143": {
+ "content": "pbt_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256144": {
+ "content": "quy_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256145": {
+ "content": "ron_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256146": {
+ "content": "run_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256147": {
+ "content": "rus_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256148": {
+ "content": "sag_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256149": {
+ "content": "san_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256150": {
+ "content": "sat_Beng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256151": {
+ "content": "scn_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256152": {
+ "content": "shn_Mymr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256153": {
+ "content": "sin_Sinh",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256154": {
+ "content": "slk_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256155": {
+ "content": "slv_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256156": {
+ "content": "smo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256157": {
+ "content": "sna_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256158": {
+ "content": "snd_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256159": {
+ "content": "som_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256160": {
+ "content": "sot_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256161": {
+ "content": "spa_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256162": {
+ "content": "als_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256163": {
+ "content": "srd_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256164": {
+ "content": "srp_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256165": {
+ "content": "ssw_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256166": {
+ "content": "sun_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256167": {
+ "content": "swe_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256168": {
+ "content": "swh_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256169": {
+ "content": "szl_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256170": {
+ "content": "tam_Taml",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256171": {
+ "content": "tat_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256172": {
+ "content": "tel_Telu",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256173": {
+ "content": "tgk_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256174": {
+ "content": "tgl_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256175": {
+ "content": "tha_Thai",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256176": {
+ "content": "tir_Ethi",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256177": {
+ "content": "taq_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256178": {
+ "content": "taq_Tfng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256179": {
+ "content": "tpi_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256180": {
+ "content": "tsn_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256181": {
+ "content": "tso_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256182": {
+ "content": "tuk_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256183": {
+ "content": "tum_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256184": {
+ "content": "tur_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256185": {
+ "content": "twi_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256186": {
+ "content": "tzm_Tfng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256187": {
+ "content": "uig_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256188": {
+ "content": "ukr_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256189": {
+ "content": "umb_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256190": {
+ "content": "urd_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256191": {
+ "content": "uzn_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256192": {
+ "content": "vec_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256193": {
+ "content": "vie_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256194": {
+ "content": "war_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256195": {
+ "content": "wol_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256196": {
+ "content": "xho_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256197": {
+ "content": "ydd_Hebr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256198": {
+ "content": "yor_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256199": {
+ "content": "yue_Hant",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256200": {
+ "content": "zho_Hans",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256201": {
+ "content": "zho_Hant",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256202": {
+ "content": "zul_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256203": {
+ "content": "<mask>",
+ "lstrip": true,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "additional_special_tokens": [
+ "ace_Arab",
+ "ace_Latn",
+ "acm_Arab",
+ "acq_Arab",
+ "aeb_Arab",
+ "afr_Latn",
+ "ajp_Arab",
+ "aka_Latn",
+ "amh_Ethi",
+ "apc_Arab",
+ "arb_Arab",
+ "ars_Arab",
+ "ary_Arab",
+ "arz_Arab",
+ "asm_Beng",
+ "ast_Latn",
+ "awa_Deva",
+ "ayr_Latn",
+ "azb_Arab",
+ "azj_Latn",
+ "bak_Cyrl",
+ "bam_Latn",
+ "ban_Latn",
+ "bel_Cyrl",
+ "bem_Latn",
+ "ben_Beng",
+ "bho_Deva",
+ "bjn_Arab",
+ "bjn_Latn",
+ "bod_Tibt",
+ "bos_Latn",
+ "bug_Latn",
+ "bul_Cyrl",
+ "cat_Latn",
+ "ceb_Latn",
+ "ces_Latn",
+ "cjk_Latn",
+ "ckb_Arab",
+ "crh_Latn",
+ "cym_Latn",
+ "dan_Latn",
+ "deu_Latn",
+ "dik_Latn",
+ "dyu_Latn",
+ "dzo_Tibt",
+ "ell_Grek",
+ "eng_Latn",
+ "epo_Latn",
+ "est_Latn",
+ "eus_Latn",
+ "ewe_Latn",
+ "fao_Latn",
+ "pes_Arab",
+ "fij_Latn",
+ "fin_Latn",
+ "fon_Latn",
+ "fra_Latn",
+ "fur_Latn",
+ "fuv_Latn",
+ "gla_Latn",
+ "gle_Latn",
+ "glg_Latn",
+ "grn_Latn",
+ "guj_Gujr",
+ "hat_Latn",
+ "hau_Latn",
+ "heb_Hebr",
+ "hin_Deva",
+ "hne_Deva",
+ "hrv_Latn",
+ "hun_Latn",
+ "hye_Armn",
+ "ibo_Latn",
+ "ilo_Latn",
+ "ind_Latn",
+ "isl_Latn",
+ "ita_Latn",
+ "jav_Latn",
+ "jpn_Jpan",
+ "kab_Latn",
+ "kac_Latn",
+ "kam_Latn",
+ "kan_Knda",
+ "kas_Arab",
+ "kas_Deva",
+ "kat_Geor",
+ "knc_Arab",
+ "knc_Latn",
+ "kaz_Cyrl",
+ "kbp_Latn",
+ "kea_Latn",
+ "khm_Khmr",
+ "kik_Latn",
+ "kin_Latn",
+ "kir_Cyrl",
+ "kmb_Latn",
+ "kon_Latn",
+ "kor_Hang",
+ "kmr_Latn",
+ "lao_Laoo",
+ "lvs_Latn",
+ "lij_Latn",
+ "lim_Latn",
+ "lin_Latn",
+ "lit_Latn",
+ "lmo_Latn",
+ "ltg_Latn",
+ "ltz_Latn",
+ "lua_Latn",
+ "lug_Latn",
+ "luo_Latn",
+ "lus_Latn",
+ "mag_Deva",
+ "mai_Deva",
+ "mal_Mlym",
+ "mar_Deva",
+ "min_Latn",
+ "mkd_Cyrl",
+ "plt_Latn",
+ "mlt_Latn",
+ "mni_Beng",
+ "khk_Cyrl",
+ "mos_Latn",
+ "mri_Latn",
+ "zsm_Latn",
+ "mya_Mymr",
+ "nld_Latn",
+ "nno_Latn",
+ "nob_Latn",
+ "npi_Deva",
+ "nso_Latn",
+ "nus_Latn",
+ "nya_Latn",
+ "oci_Latn",
+ "gaz_Latn",
+ "ory_Orya",
+ "pag_Latn",
+ "pan_Guru",
+ "pap_Latn",
+ "pol_Latn",
+ "por_Latn",
+ "prs_Arab",
+ "pbt_Arab",
+ "quy_Latn",
+ "ron_Latn",
+ "run_Latn",
+ "rus_Cyrl",
+ "sag_Latn",
+ "san_Deva",
+ "sat_Beng",
+ "scn_Latn",
+ "shn_Mymr",
+ "sin_Sinh",
+ "slk_Latn",
+ "slv_Latn",
+ "smo_Latn",
+ "sna_Latn",
+ "snd_Arab",
+ "som_Latn",
+ "sot_Latn",
+ "spa_Latn",
+ "als_Latn",
+ "srd_Latn",
+ "srp_Cyrl",
+ "ssw_Latn",
+ "sun_Latn",
+ "swe_Latn",
+ "swh_Latn",
+ "szl_Latn",
+ "tam_Taml",
+ "tat_Cyrl",
+ "tel_Telu",
+ "tgk_Cyrl",
+ "tgl_Latn",
+ "tha_Thai",
+ "tir_Ethi",
+ "taq_Latn",
+ "taq_Tfng",
+ "tpi_Latn",
+ "tsn_Latn",
+ "tso_Latn",
+ "tuk_Latn",
+ "tum_Latn",
+ "tur_Latn",
+ "twi_Latn",
+ "tzm_Tfng",
+ "uig_Arab",
+ "ukr_Cyrl",
+ "umb_Latn",
+ "urd_Arab",
+ "uzn_Latn",
+ "vec_Latn",
+ "vie_Latn",
+ "war_Latn",
+ "wol_Latn",
+ "xho_Latn",
+ "ydd_Hebr",
+ "yor_Latn",
+ "yue_Hant",
+ "zho_Hans",
+ "zho_Hant",
+ "zul_Latn"
+ ],
+ "bos_token": "<s>",
+ "clean_up_tokenization_spaces": true,
+ "cls_token": "<s>",
+ "eos_token": "</s>",
+ "legacy_behaviour": false,
+ "mask_token": "<mask>",
+ "model_max_length": 1024,
+ "pad_token": "<pad>",
+ "sep_token": "</s>",
+ "sp_model_kwargs": {},
+ "src_lang": "eng_Latn",
+ "tgt_lang": null,
+ "tokenizer_class": "NllbTokenizer",
+ "unk_token": "<unk>"
+}
diff --git a/checkpoint-1500/trainer_state.json b/checkpoint-1500/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..d91d3a3a68af52bb07594bf760272bcd6c7f0650
--- /dev/null
+++ b/checkpoint-1500/trainer_state.json
@@ -0,0 +1,10533 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 1.5004219936857242,
+ "eval_steps": 500,
+ "global_step": 1500,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.001000281329123816,
+ "grad_norm": 7.503077030181885,
+ "learning_rate": 0.0001999999450590425,
+ "loss": 3.6513,
+ "step": 1
+ },
+ {
+ "epoch": 0.002000562658247632,
+ "grad_norm": 3.1191189289093018,
+ "learning_rate": 0.00019999978023623033,
+ "loss": 2.8683,
+ "step": 2
+ },
+ {
+ "epoch": 0.003000843987371448,
+ "grad_norm": 1.9282511472702026,
+ "learning_rate": 0.0001999995055317446,
+ "loss": 2.7882,
+ "step": 3
+ },
+ {
+ "epoch": 0.004001125316495264,
+ "grad_norm": 1.726026177406311,
+ "learning_rate": 0.00019999912094588717,
+ "loss": 2.7716,
+ "step": 4
+ },
+ {
+ "epoch": 0.005001406645619081,
+ "grad_norm": 1.4632996320724487,
+ "learning_rate": 0.00019999862647908064,
+ "loss": 2.6869,
+ "step": 5
+ },
+ {
+ "epoch": 0.006001687974742896,
+ "grad_norm": 1.5544543266296387,
+ "learning_rate": 0.00019999802213186834,
+ "loss": 2.6952,
+ "step": 6
+ },
+ {
+ "epoch": 0.007001969303866712,
+ "grad_norm": 1.5888980627059937,
+ "learning_rate": 0.0001999973079049143,
+ "loss": 2.1237,
+ "step": 7
+ },
+ {
+ "epoch": 0.008002250632990529,
+ "grad_norm": 1.8750641345977783,
+ "learning_rate": 0.00019999648379900338,
+ "loss": 2.3376,
+ "step": 8
+ },
+ {
+ "epoch": 0.009002531962114344,
+ "grad_norm": 1.0540648698806763,
+ "learning_rate": 0.0001999955498150411,
+ "loss": 2.4896,
+ "step": 9
+ },
+ {
+ "epoch": 0.010002813291238161,
+ "grad_norm": 1.0269274711608887,
+ "learning_rate": 0.00019999450595405374,
+ "loss": 2.1365,
+ "step": 10
+ },
+ {
+ "epoch": 0.011003094620361977,
+ "grad_norm": 1.0851730108261108,
+ "learning_rate": 0.0001999933522171883,
+ "loss": 2.235,
+ "step": 11
+ },
+ {
+ "epoch": 0.012003375949485792,
+ "grad_norm": 0.927042543888092,
+ "learning_rate": 0.00019999208860571255,
+ "loss": 2.2438,
+ "step": 12
+ },
+ {
+ "epoch": 0.01300365727860961,
+ "grad_norm": 1.3729208707809448,
+ "learning_rate": 0.00019999071512101496,
+ "loss": 2.0845,
+ "step": 13
+ },
+ {
+ "epoch": 0.014003938607733425,
+ "grad_norm": 1.1325910091400146,
+ "learning_rate": 0.00019998923176460474,
+ "loss": 2.0668,
+ "step": 14
+ },
+ {
+ "epoch": 0.01500421993685724,
+ "grad_norm": 0.9290457367897034,
+ "learning_rate": 0.00019998763853811184,
+ "loss": 2.0227,
+ "step": 15
+ },
+ {
+ "epoch": 0.016004501265981057,
+ "grad_norm": 0.942140519618988,
+ "learning_rate": 0.00019998593544328692,
+ "loss": 2.1598,
+ "step": 16
+ },
+ {
+ "epoch": 0.017004782595104875,
+ "grad_norm": 1.096635103225708,
+ "learning_rate": 0.00019998412248200138,
+ "loss": 2.1897,
+ "step": 17
+ },
+ {
+ "epoch": 0.01800506392422869,
+ "grad_norm": 1.1107186079025269,
+ "learning_rate": 0.00019998219965624734,
+ "loss": 2.0546,
+ "step": 18
+ },
+ {
+ "epoch": 0.019005345253352506,
+ "grad_norm": 0.9696593880653381,
+ "learning_rate": 0.0001999801669681376,
+ "loss": 2.0317,
+ "step": 19
+ },
+ {
+ "epoch": 0.020005626582476323,
+ "grad_norm": 0.9394300580024719,
+ "learning_rate": 0.00019997802441990573,
+ "loss": 2.2883,
+ "step": 20
+ },
+ {
+ "epoch": 0.021005907911600136,
+ "grad_norm": 1.08865225315094,
+ "learning_rate": 0.00019997577201390606,
+ "loss": 1.9838,
+ "step": 21
+ },
+ {
+ "epoch": 0.022006189240723954,
+ "grad_norm": 1.0712405443191528,
+ "learning_rate": 0.00019997340975261353,
+ "loss": 2.1177,
+ "step": 22
+ },
+ {
+ "epoch": 0.02300647056984777,
+ "grad_norm": 1.3190314769744873,
+ "learning_rate": 0.00019997093763862383,
+ "loss": 1.9755,
+ "step": 23
+ },
+ {
+ "epoch": 0.024006751898971584,
+ "grad_norm": 1.0659812688827515,
+ "learning_rate": 0.0001999683556746534,
+ "loss": 1.9829,
+ "step": 24
+ },
+ {
+ "epoch": 0.0250070332280954,
+ "grad_norm": 1.1824345588684082,
+ "learning_rate": 0.0001999656638635393,
+ "loss": 2.4219,
+ "step": 25
+ },
+ {
+ "epoch": 0.02600731455721922,
+ "grad_norm": 1.3446214199066162,
+ "learning_rate": 0.0001999628622082394,
+ "loss": 1.9644,
+ "step": 26
+ },
+ {
+ "epoch": 0.027007595886343033,
+ "grad_norm": 1.2527475357055664,
+ "learning_rate": 0.0001999599507118322,
+ "loss": 2.1889,
+ "step": 27
+ },
+ {
+ "epoch": 0.02800787721546685,
+ "grad_norm": 1.4738999605178833,
+ "learning_rate": 0.00019995692937751683,
+ "loss": 2.1949,
+ "step": 28
+ },
+ {
+ "epoch": 0.029008158544590667,
+ "grad_norm": 1.0533576011657715,
+ "learning_rate": 0.0001999537982086133,
+ "loss": 2.1034,
+ "step": 29
+ },
+ {
+ "epoch": 0.03000843987371448,
+ "grad_norm": 1.0343223810195923,
+ "learning_rate": 0.00019995055720856218,
+ "loss": 1.9561,
+ "step": 30
+ },
+ {
+ "epoch": 0.031008721202838298,
+ "grad_norm": 1.1149976253509521,
+ "learning_rate": 0.00019994720638092468,
+ "loss": 2.0981,
+ "step": 31
+ },
+ {
+ "epoch": 0.032009002531962115,
+ "grad_norm": 1.197178840637207,
+ "learning_rate": 0.00019994374572938277,
+ "loss": 2.1587,
+ "step": 32
+ },
+ {
+ "epoch": 0.03300928386108593,
+ "grad_norm": 0.9382303953170776,
+ "learning_rate": 0.00019994017525773913,
+ "loss": 1.869,
+ "step": 33
+ },
+ {
+ "epoch": 0.03400956519020975,
+ "grad_norm": 1.0526461601257324,
+ "learning_rate": 0.00019993649496991705,
+ "loss": 1.9045,
+ "step": 34
+ },
+ {
+ "epoch": 0.03500984651933356,
+ "grad_norm": 0.8510498404502869,
+ "learning_rate": 0.00019993270486996046,
+ "loss": 2.1005,
+ "step": 35
+ },
+ {
+ "epoch": 0.03601012784845738,
+ "grad_norm": 0.9990401268005371,
+ "learning_rate": 0.000199928804962034,
+ "loss": 1.8569,
+ "step": 36
+ },
+ {
+ "epoch": 0.037010409177581194,
+ "grad_norm": 0.9243854284286499,
+ "learning_rate": 0.00019992479525042303,
+ "loss": 1.9666,
+ "step": 37
+ },
+ {
+ "epoch": 0.03801069050670501,
+ "grad_norm": 0.7774227261543274,
+ "learning_rate": 0.00019992067573953342,
+ "loss": 2.0376,
+ "step": 38
+ },
+ {
+ "epoch": 0.03901097183582883,
+ "grad_norm": 0.8114833235740662,
+ "learning_rate": 0.0001999164464338918,
+ "loss": 2.1608,
+ "step": 39
+ },
+ {
+ "epoch": 0.040011253164952645,
+ "grad_norm": 0.8716320395469666,
+ "learning_rate": 0.0001999121073381454,
+ "loss": 2.0743,
+ "step": 40
+ },
+ {
+ "epoch": 0.041011534494076456,
+ "grad_norm": 0.9571239948272705,
+ "learning_rate": 0.0001999076584570621,
+ "loss": 2.0128,
+ "step": 41
+ },
+ {
+ "epoch": 0.04201181582320027,
+ "grad_norm": 1.038691520690918,
+ "learning_rate": 0.00019990309979553045,
+ "loss": 1.976,
+ "step": 42
+ },
+ {
+ "epoch": 0.04301209715232409,
+ "grad_norm": 1.0576292276382446,
+ "learning_rate": 0.00019989843135855958,
+ "loss": 1.94,
+ "step": 43
+ },
+ {
+ "epoch": 0.04401237848144791,
+ "grad_norm": 1.0991204977035522,
+ "learning_rate": 0.00019989365315127922,
+ "loss": 1.9397,
+ "step": 44
+ },
+ {
+ "epoch": 0.045012659810571724,
+ "grad_norm": 0.9268686175346375,
+ "learning_rate": 0.0001998887651789398,
+ "loss": 1.9305,
+ "step": 45
+ },
+ {
+ "epoch": 0.04601294113969554,
+ "grad_norm": 0.8459104299545288,
+ "learning_rate": 0.0001998837674469123,
+ "loss": 1.7941,
+ "step": 46
+ },
+ {
+ "epoch": 0.04701322246881936,
+ "grad_norm": 0.9260527491569519,
+ "learning_rate": 0.00019987865996068833,
+ "loss": 1.8843,
+ "step": 47
+ },
+ {
+ "epoch": 0.04801350379794317,
+ "grad_norm": 0.8370497226715088,
+ "learning_rate": 0.00019987344272588006,
+ "loss": 1.8779,
+ "step": 48
+ },
+ {
+ "epoch": 0.049013785127066986,
+ "grad_norm": 0.9228008389472961,
+ "learning_rate": 0.00019986811574822033,
+ "loss": 2.1713,
+ "step": 49
+ },
+ {
+ "epoch": 0.0500140664561908,
+ "grad_norm": 1.013746738433838,
+ "learning_rate": 0.00019986267903356254,
+ "loss": 2.1443,
+ "step": 50
+ },
+ {
+ "epoch": 0.05101434778531462,
+ "grad_norm": 1.0155737400054932,
+ "learning_rate": 0.0001998571325878806,
+ "loss": 1.9679,
+ "step": 51
+ },
+ {
+ "epoch": 0.05201462911443844,
+ "grad_norm": 0.9591345191001892,
+ "learning_rate": 0.0001998514764172691,
+ "loss": 2.0611,
+ "step": 52
+ },
+ {
+ "epoch": 0.053014910443562255,
+ "grad_norm": 0.9030050039291382,
+ "learning_rate": 0.00019984571052794313,
+ "loss": 1.9698,
+ "step": 53
+ },
+ {
+ "epoch": 0.054015191772686065,
+ "grad_norm": 0.7697799205780029,
+ "learning_rate": 0.00019983983492623833,
+ "loss": 2.0609,
+ "step": 54
+ },
+ {
+ "epoch": 0.05501547310180988,
+ "grad_norm": 0.8806005716323853,
+ "learning_rate": 0.00019983384961861096,
+ "loss": 1.9756,
+ "step": 55
+ },
+ {
+ "epoch": 0.0560157544309337,
+ "grad_norm": 0.9424449801445007,
+ "learning_rate": 0.0001998277546116378,
+ "loss": 2.0913,
+ "step": 56
+ },
+ {
+ "epoch": 0.05701603576005752,
+ "grad_norm": 1.139495849609375,
+ "learning_rate": 0.00019982154991201608,
+ "loss": 2.2524,
+ "step": 57
+ },
+ {
+ "epoch": 0.058016317089181334,
+ "grad_norm": 1.094347357749939,
+ "learning_rate": 0.00019981523552656377,
+ "loss": 1.8501,
+ "step": 58
+ },
+ {
+ "epoch": 0.05901659841830515,
+ "grad_norm": 1.1519278287887573,
+ "learning_rate": 0.00019980881146221914,
+ "loss": 1.9866,
+ "step": 59
+ },
+ {
+ "epoch": 0.06001687974742896,
+ "grad_norm": 1.2018250226974487,
+ "learning_rate": 0.00019980227772604112,
+ "loss": 1.8226,
+ "step": 60
+ },
+ {
+ "epoch": 0.06101716107655278,
+ "grad_norm": 0.9565753936767578,
+ "learning_rate": 0.0001997956343252091,
+ "loss": 1.8434,
+ "step": 61
+ },
+ {
+ "epoch": 0.062017442405676595,
+ "grad_norm": 1.0832768678665161,
+ "learning_rate": 0.00019978888126702296,
+ "loss": 2.1271,
+ "step": 62
+ },
+ {
+ "epoch": 0.06301772373480041,
+ "grad_norm": 0.8973837494850159,
+ "learning_rate": 0.00019978201855890308,
+ "loss": 1.8331,
+ "step": 63
+ },
+ {
+ "epoch": 0.06401800506392423,
+ "grad_norm": 0.8754604458808899,
+ "learning_rate": 0.00019977504620839035,
+ "loss": 2.1379,
+ "step": 64
+ },
+ {
+ "epoch": 0.06501828639304805,
+ "grad_norm": 0.8244839310646057,
+ "learning_rate": 0.00019976796422314615,
+ "loss": 1.8431,
+ "step": 65
+ },
+ {
+ "epoch": 0.06601856772217186,
+ "grad_norm": 0.8213551044464111,
+ "learning_rate": 0.00019976077261095226,
+ "loss": 1.9155,
+ "step": 66
+ },
+ {
+ "epoch": 0.06701884905129568,
+ "grad_norm": 0.9140985608100891,
+ "learning_rate": 0.00019975347137971098,
+ "loss": 2.0651,
+ "step": 67
+ },
+ {
+ "epoch": 0.0680191303804195,
+ "grad_norm": 0.8518921732902527,
+ "learning_rate": 0.00019974606053744503,
+ "loss": 1.8197,
+ "step": 68
+ },
+ {
+ "epoch": 0.06901941170954332,
+ "grad_norm": 0.8397145867347717,
+ "learning_rate": 0.00019973854009229763,
+ "loss": 1.8621,
+ "step": 69
+ },
+ {
+ "epoch": 0.07001969303866712,
+ "grad_norm": 0.8727964162826538,
+ "learning_rate": 0.00019973091005253232,
+ "loss": 1.762,
+ "step": 70
+ },
+ {
+ "epoch": 0.07101997436779094,
+ "grad_norm": 0.9284623265266418,
+ "learning_rate": 0.0001997231704265332,
+ "loss": 1.8675,
+ "step": 71
+ },
+ {
+ "epoch": 0.07202025569691475,
+ "grad_norm": 0.8280015587806702,
+ "learning_rate": 0.00019971532122280464,
+ "loss": 1.931,
+ "step": 72
+ },
+ {
+ "epoch": 0.07302053702603857,
+ "grad_norm": 0.7591394186019897,
+ "learning_rate": 0.0001997073624499716,
+ "loss": 1.8485,
+ "step": 73
+ },
+ {
+ "epoch": 0.07402081835516239,
+ "grad_norm": 0.975128710269928,
+ "learning_rate": 0.0001996992941167792,
+ "loss": 2.0784,
+ "step": 74
+ },
+ {
+ "epoch": 0.0750210996842862,
+ "grad_norm": 0.8034948110580444,
+ "learning_rate": 0.00019969111623209323,
+ "loss": 1.9849,
+ "step": 75
+ },
+ {
+ "epoch": 0.07602138101341002,
+ "grad_norm": 0.8540483713150024,
+ "learning_rate": 0.00019968282880489957,
+ "loss": 1.7832,
+ "step": 76
+ },
+ {
+ "epoch": 0.07702166234253384,
+ "grad_norm": 0.8181695342063904,
+ "learning_rate": 0.00019967443184430467,
+ "loss": 1.944,
+ "step": 77
+ },
+ {
+ "epoch": 0.07802194367165766,
+ "grad_norm": 0.8446747064590454,
+ "learning_rate": 0.0001996659253595353,
+ "loss": 1.8508,
+ "step": 78
+ },
+ {
+ "epoch": 0.07902222500078147,
+ "grad_norm": 0.8280364871025085,
+ "learning_rate": 0.0001996573093599385,
+ "loss": 1.843,
+ "step": 79
+ },
+ {
+ "epoch": 0.08002250632990529,
+ "grad_norm": 0.8016006350517273,
+ "learning_rate": 0.00019964858385498172,
+ "loss": 1.9368,
+ "step": 80
+ },
+ {
+ "epoch": 0.08102278765902911,
+ "grad_norm": 0.8450536131858826,
+ "learning_rate": 0.00019963974885425266,
+ "loss": 1.9736,
+ "step": 81
+ },
+ {
+ "epoch": 0.08202306898815291,
+ "grad_norm": 0.9172171950340271,
+ "learning_rate": 0.00019963080436745945,
+ "loss": 1.9382,
+ "step": 82
+ },
+ {
+ "epoch": 0.08302335031727673,
+ "grad_norm": 0.8581916689872742,
+ "learning_rate": 0.00019962175040443044,
+ "loss": 2.224,
+ "step": 83
+ },
+ {
+ "epoch": 0.08402363164640055,
+ "grad_norm": 0.9350367188453674,
+ "learning_rate": 0.0001996125869751143,
+ "loss": 2.0519,
+ "step": 84
+ },
+ {
+ "epoch": 0.08502391297552436,
+ "grad_norm": 0.9276247620582581,
+ "learning_rate": 0.00019960331408957997,
+ "loss": 1.9657,
+ "step": 85
+ },
+ {
+ "epoch": 0.08602419430464818,
+ "grad_norm": 0.871574342250824,
+ "learning_rate": 0.00019959393175801671,
+ "loss": 1.9399,
+ "step": 86
+ },
+ {
+ "epoch": 0.087024475633772,
+ "grad_norm": 1.0662888288497925,
+ "learning_rate": 0.00019958443999073397,
+ "loss": 1.9089,
+ "step": 87
+ },
+ {
+ "epoch": 0.08802475696289581,
+ "grad_norm": 0.8258713483810425,
+ "learning_rate": 0.00019957483879816151,
+ "loss": 1.839,
+ "step": 88
+ },
+ {
+ "epoch": 0.08902503829201963,
+ "grad_norm": 0.8154664039611816,
+ "learning_rate": 0.00019956512819084928,
+ "loss": 1.8409,
+ "step": 89
+ },
+ {
+ "epoch": 0.09002531962114345,
+ "grad_norm": 0.8584638833999634,
+ "learning_rate": 0.00019955530817946748,
+ "loss": 1.9521,
+ "step": 90
+ },
+ {
+ "epoch": 0.09102560095026727,
+ "grad_norm": 0.7917523384094238,
+ "learning_rate": 0.00019954537877480655,
+ "loss": 1.9495,
+ "step": 91
+ },
+ {
+ "epoch": 0.09202588227939108,
+ "grad_norm": 1.0129039287567139,
+ "learning_rate": 0.00019953533998777706,
+ "loss": 1.949,
+ "step": 92
+ },
+ {
+ "epoch": 0.0930261636085149,
+ "grad_norm": 0.8677986264228821,
+ "learning_rate": 0.00019952519182940993,
+ "loss": 1.7875,
+ "step": 93
+ },
+ {
+ "epoch": 0.09402644493763872,
+ "grad_norm": 0.8848614692687988,
+ "learning_rate": 0.00019951493431085603,
+ "loss": 2.0675,
+ "step": 94
+ },
+ {
+ "epoch": 0.09502672626676252,
+ "grad_norm": 0.9936463832855225,
+ "learning_rate": 0.00019950456744338658,
+ "loss": 1.6761,
+ "step": 95
+ },
+ {
+ "epoch": 0.09602700759588634,
+ "grad_norm": 1.0520148277282715,
+ "learning_rate": 0.00019949409123839288,
+ "loss": 2.081,
+ "step": 96
+ },
+ {
+ "epoch": 0.09702728892501016,
+ "grad_norm": 0.8061773180961609,
+ "learning_rate": 0.00019948350570738642,
+ "loss": 1.7281,
+ "step": 97
+ },
+ {
+ "epoch": 0.09802757025413397,
+ "grad_norm": 0.7642756104469299,
+ "learning_rate": 0.0001994728108619987,
+ "loss": 2.0032,
+ "step": 98
+ },
+ {
+ "epoch": 0.09902785158325779,
+ "grad_norm": 0.8541550040245056,
+ "learning_rate": 0.0001994620067139815,
+ "loss": 2.1136,
+ "step": 99
+ },
+ {
+ "epoch": 0.1000281329123816,
+ "grad_norm": 0.7868679761886597,
+ "learning_rate": 0.00019945109327520658,
+ "loss": 1.8695,
+ "step": 100
+ },
+ {
+ "epoch": 0.10102841424150542,
+ "grad_norm": 0.8776901364326477,
+ "learning_rate": 0.00019944007055766586,
+ "loss": 1.9786,
+ "step": 101
+ },
+ {
+ "epoch": 0.10202869557062924,
+ "grad_norm": 0.9013833999633789,
+ "learning_rate": 0.00019942893857347128,
+ "loss": 2.1466,
+ "step": 102
+ },
+ {
+ "epoch": 0.10302897689975306,
+ "grad_norm": 0.957558274269104,
+ "learning_rate": 0.00019941769733485494,
+ "loss": 2.0473,
+ "step": 103
+ },
+ {
+ "epoch": 0.10402925822887688,
+ "grad_norm": 0.8921108841896057,
+ "learning_rate": 0.00019940634685416888,
+ "loss": 1.7882,
+ "step": 104
+ },
+ {
+ "epoch": 0.10502953955800069,
+ "grad_norm": 0.896019697189331,
+ "learning_rate": 0.00019939488714388524,
+ "loss": 1.8811,
+ "step": 105
+ },
+ {
+ "epoch": 0.10602982088712451,
+ "grad_norm": 0.8792067766189575,
+ "learning_rate": 0.00019938331821659614,
+ "loss": 1.8624,
+ "step": 106
+ },
+ {
+ "epoch": 0.10703010221624833,
+ "grad_norm": 0.8739930391311646,
+ "learning_rate": 0.0001993716400850138,
+ "loss": 1.8105,
+ "step": 107
+ },
+ {
+ "epoch": 0.10803038354537213,
+ "grad_norm": 0.7678424715995789,
+ "learning_rate": 0.0001993598527619703,
+ "loss": 1.8772,
+ "step": 108
+ },
+ {
+ "epoch": 0.10903066487449595,
+ "grad_norm": 0.8718745112419128,
+ "learning_rate": 0.00019934795626041783,
+ "loss": 1.8236,
+ "step": 109
+ },
+ {
+ "epoch": 0.11003094620361976,
+ "grad_norm": 0.8467247486114502,
+ "learning_rate": 0.0001993359505934285,
+ "loss": 1.8188,
+ "step": 110
+ },
+ {
+ "epoch": 0.11103122753274358,
+ "grad_norm": 0.8685783743858337,
+ "learning_rate": 0.00019932383577419432,
+ "loss": 2.0775,
+ "step": 111
+ },
+ {
+ "epoch": 0.1120315088618674,
+ "grad_norm": 0.7799698710441589,
+ "learning_rate": 0.0001993116118160273,
+ "loss": 1.6489,
+ "step": 112
+ },
+ {
+ "epoch": 0.11303179019099122,
+ "grad_norm": 0.7900094389915466,
+ "learning_rate": 0.00019929927873235938,
+ "loss": 1.8332,
+ "step": 113
+ },
+ {
+ "epoch": 0.11403207152011503,
+ "grad_norm": 0.9433258771896362,
+ "learning_rate": 0.00019928683653674237,
+ "loss": 1.9331,
+ "step": 114
+ },
+ {
+ "epoch": 0.11503235284923885,
+ "grad_norm": 0.8861056566238403,
+ "learning_rate": 0.00019927428524284805,
+ "loss": 1.9135,
+ "step": 115
+ },
+ {
+ "epoch": 0.11603263417836267,
+ "grad_norm": 0.8566756844520569,
+ "learning_rate": 0.00019926162486446792,
+ "loss": 1.9874,
+ "step": 116
+ },
+ {
+ "epoch": 0.11703291550748648,
+ "grad_norm": 0.6897929310798645,
+ "learning_rate": 0.0001992488554155135,
+ "loss": 1.946,
+ "step": 117
+ },
+ {
+ "epoch": 0.1180331968366103,
+ "grad_norm": 0.7807729244232178,
+ "learning_rate": 0.00019923597691001615,
+ "loss": 1.8127,
+ "step": 118
+ },
+ {
+ "epoch": 0.11903347816573412,
+ "grad_norm": 0.7572523355484009,
+ "learning_rate": 0.0001992229893621269,
+ "loss": 1.7768,
+ "step": 119
+ },
+ {
+ "epoch": 0.12003375949485792,
+ "grad_norm": 0.7393172979354858,
+ "learning_rate": 0.00019920989278611687,
+ "loss": 1.894,
+ "step": 120
+ },
+ {
+ "epoch": 0.12103404082398174,
+ "grad_norm": 0.866576611995697,
+ "learning_rate": 0.0001991966871963767,
+ "loss": 1.9285,
+ "step": 121
+ },
+ {
+ "epoch": 0.12203432215310556,
+ "grad_norm": 0.7326533794403076,
+ "learning_rate": 0.000199183372607417,
+ "loss": 1.9309,
+ "step": 122
+ },
+ {
+ "epoch": 0.12303460348222937,
+ "grad_norm": 0.7655537724494934,
+ "learning_rate": 0.0001991699490338681,
+ "loss": 2.1145,
+ "step": 123
+ },
+ {
+ "epoch": 0.12403488481135319,
+ "grad_norm": 0.9714633226394653,
+ "learning_rate": 0.00019915641649048005,
+ "loss": 2.0341,
+ "step": 124
+ },
+ {
+ "epoch": 0.12503516614047702,
+ "grad_norm": 0.8542420864105225,
+ "learning_rate": 0.0001991427749921227,
+ "loss": 2.1426,
+ "step": 125
+ },
+ {
+ "epoch": 0.12603544746960083,
+ "grad_norm": 0.8286274671554565,
+ "learning_rate": 0.00019912902455378556,
+ "loss": 1.8452,
+ "step": 126
+ },
+ {
+ "epoch": 0.12703572879872463,
+ "grad_norm": 0.8823768496513367,
+ "learning_rate": 0.00019911516519057788,
+ "loss": 1.8651,
+ "step": 127
+ },
+ {
+ "epoch": 0.12803601012784846,
+ "grad_norm": 0.7301567196846008,
+ "learning_rate": 0.00019910119691772863,
+ "loss": 1.7776,
+ "step": 128
+ },
+ {
+ "epoch": 0.12903629145697226,
+ "grad_norm": 0.8402552604675293,
+ "learning_rate": 0.00019908711975058637,
+ "loss": 1.8617,
+ "step": 129
+ },
+ {
+ "epoch": 0.1300365727860961,
+ "grad_norm": 0.814500093460083,
+ "learning_rate": 0.0001990729337046194,
+ "loss": 1.9156,
+ "step": 130
+ },
+ {
+ "epoch": 0.1310368541152199,
+ "grad_norm": 0.8262699246406555,
+ "learning_rate": 0.0001990586387954156,
+ "loss": 1.8659,
+ "step": 131
+ },
+ {
+ "epoch": 0.13203713544434373,
+ "grad_norm": 0.8846324682235718,
+ "learning_rate": 0.00019904423503868247,
+ "loss": 2.043,
+ "step": 132
+ },
+ {
+ "epoch": 0.13303741677346753,
+ "grad_norm": 0.8757227659225464,
+ "learning_rate": 0.00019902972245024715,
+ "loss": 1.9217,
+ "step": 133
+ },
+ {
+ "epoch": 0.13403769810259136,
+ "grad_norm": 0.8476879596710205,
+ "learning_rate": 0.00019901510104605637,
+ "loss": 1.8892,
+ "step": 134
+ },
+ {
+ "epoch": 0.13503797943171517,
+ "grad_norm": 0.7707583904266357,
+ "learning_rate": 0.00019900037084217637,
+ "loss": 1.787,
+ "step": 135
+ },
+ {
+ "epoch": 0.136038260760839,
+ "grad_norm": 0.7389562129974365,
+ "learning_rate": 0.00019898553185479303,
+ "loss": 1.5854,
+ "step": 136
+ },
+ {
+ "epoch": 0.1370385420899628,
+ "grad_norm": 0.7331375479698181,
+ "learning_rate": 0.00019897058410021167,
+ "loss": 1.997,
+ "step": 137
+ },
+ {
+ "epoch": 0.13803882341908663,
+ "grad_norm": 0.7219388484954834,
+ "learning_rate": 0.00019895552759485722,
+ "loss": 1.8337,
+ "step": 138
+ },
+ {
+ "epoch": 0.13903910474821043,
+ "grad_norm": 0.8535702228546143,
+ "learning_rate": 0.00019894036235527395,
+ "loss": 1.7818,
+ "step": 139
+ },
+ {
+ "epoch": 0.14003938607733424,
+ "grad_norm": 0.7627841830253601,
+ "learning_rate": 0.00019892508839812584,
+ "loss": 1.8173,
+ "step": 140
+ },
+ {
+ "epoch": 0.14103966740645807,
+ "grad_norm": 0.8397619724273682,
+ "learning_rate": 0.00019890970574019617,
+ "loss": 1.8735,
+ "step": 141
+ },
+ {
+ "epoch": 0.14203994873558187,
+ "grad_norm": 0.8093482851982117,
+ "learning_rate": 0.00019889421439838763,
+ "loss": 1.9918,
+ "step": 142
+ },
+ {
+ "epoch": 0.1430402300647057,
+ "grad_norm": 0.8853684067726135,
+ "learning_rate": 0.00019887861438972246,
+ "loss": 1.5825,
+ "step": 143
+ },
+ {
+ "epoch": 0.1440405113938295,
+ "grad_norm": 0.7413788437843323,
+ "learning_rate": 0.00019886290573134228,
+ "loss": 1.9068,
+ "step": 144
+ },
+ {
+ "epoch": 0.14504079272295334,
+ "grad_norm": 0.7924477458000183,
+ "learning_rate": 0.000198847088440508,
+ "loss": 1.8582,
+ "step": 145
+ },
+ {
+ "epoch": 0.14604107405207714,
+ "grad_norm": 0.8679131865501404,
+ "learning_rate": 0.0001988311625346,
+ "loss": 1.7104,
+ "step": 146
+ },
+ {
+ "epoch": 0.14704135538120097,
+ "grad_norm": 0.7480150461196899,
+ "learning_rate": 0.00019881512803111796,
+ "loss": 1.7288,
+ "step": 147
+ },
+ {
+ "epoch": 0.14804163671032478,
+ "grad_norm": 0.8382390737533569,
+ "learning_rate": 0.00019879898494768093,
+ "loss": 1.8004,
+ "step": 148
+ },
+ {
+ "epoch": 0.1490419180394486,
+ "grad_norm": 0.7360037565231323,
+ "learning_rate": 0.00019878273330202717,
+ "loss": 1.85,
+ "step": 149
+ },
+ {
+ "epoch": 0.1500421993685724,
+ "grad_norm": 0.9644019603729248,
+ "learning_rate": 0.00019876637311201433,
+ "loss": 2.065,
+ "step": 150
+ },
+ {
+ "epoch": 0.15104248069769624,
+ "grad_norm": 0.8116248250007629,
+ "learning_rate": 0.00019874990439561934,
+ "loss": 1.702,
+ "step": 151
+ },
+ {
+ "epoch": 0.15204276202682004,
+ "grad_norm": 0.9301722645759583,
+ "learning_rate": 0.0001987333271709383,
+ "loss": 1.8089,
+ "step": 152
+ },
+ {
+ "epoch": 0.15304304335594385,
+ "grad_norm": 0.7991555333137512,
+ "learning_rate": 0.00019871664145618657,
+ "loss": 1.8227,
+ "step": 153
+ },
+ {
+ "epoch": 0.15404332468506768,
+ "grad_norm": 0.8676092028617859,
+ "learning_rate": 0.00019869984726969878,
+ "loss": 1.7253,
+ "step": 154
+ },
+ {
+ "epoch": 0.15504360601419148,
+ "grad_norm": 0.8022972941398621,
+ "learning_rate": 0.00019868294462992866,
+ "loss": 1.8766,
+ "step": 155
+ },
+ {
+ "epoch": 0.1560438873433153,
+ "grad_norm": 1.128886103630066,
+ "learning_rate": 0.00019866593355544922,
+ "loss": 2.0197,
+ "step": 156
+ },
+ {
+ "epoch": 0.15704416867243912,
+ "grad_norm": 0.7420483827590942,
+ "learning_rate": 0.00019864881406495246,
+ "loss": 1.8825,
+ "step": 157
+ },
+ {
+ "epoch": 0.15804445000156295,
+ "grad_norm": 0.7797536849975586,
+ "learning_rate": 0.00019863158617724967,
+ "loss": 1.8892,
+ "step": 158
+ },
+ {
+ "epoch": 0.15904473133068675,
+ "grad_norm": 0.6859965324401855,
+ "learning_rate": 0.00019861424991127115,
+ "loss": 1.8424,
+ "step": 159
+ },
+ {
+ "epoch": 0.16004501265981058,
+ "grad_norm": 0.8115108609199524,
+ "learning_rate": 0.00019859680528606637,
+ "loss": 1.8394,
+ "step": 160
+ },
+ {
+ "epoch": 0.16104529398893438,
+ "grad_norm": 0.9756322503089905,
+ "learning_rate": 0.00019857925232080373,
+ "loss": 1.726,
+ "step": 161
+ },
+ {
+ "epoch": 0.16204557531805822,
+ "grad_norm": 0.8894350528717041,
+ "learning_rate": 0.00019856159103477086,
+ "loss": 1.8893,
+ "step": 162
+ },
+ {
+ "epoch": 0.16304585664718202,
+ "grad_norm": 0.8075819611549377,
+ "learning_rate": 0.00019854382144737426,
+ "loss": 1.6596,
+ "step": 163
+ },
+ {
+ "epoch": 0.16404613797630582,
+ "grad_norm": 0.8861923813819885,
+ "learning_rate": 0.00019852594357813952,
+ "loss": 1.9352,
+ "step": 164
+ },
+ {
+ "epoch": 0.16504641930542965,
+ "grad_norm": 0.8511936068534851,
+ "learning_rate": 0.00019850795744671116,
+ "loss": 1.9416,
+ "step": 165
+ },
+ {
+ "epoch": 0.16604670063455346,
+ "grad_norm": 0.9425658583641052,
+ "learning_rate": 0.0001984898630728527,
+ "loss": 1.9081,
+ "step": 166
+ },
+ {
+ "epoch": 0.1670469819636773,
+ "grad_norm": 0.7502055168151855,
+ "learning_rate": 0.0001984716604764466,
+ "loss": 1.703,
+ "step": 167
+ },
+ {
+ "epoch": 0.1680472632928011,
+ "grad_norm": 0.9135978817939758,
+ "learning_rate": 0.0001984533496774942,
+ "loss": 1.7641,
+ "step": 168
+ },
+ {
+ "epoch": 0.16904754462192492,
+ "grad_norm": 0.7768126726150513,
+ "learning_rate": 0.0001984349306961158,
+ "loss": 1.7053,
+ "step": 169
+ },
+ {
+ "epoch": 0.17004782595104873,
+ "grad_norm": 0.8106538653373718,
+ "learning_rate": 0.00019841640355255043,
+ "loss": 1.8646,
+ "step": 170
+ },
+ {
+ "epoch": 0.17104810728017256,
+ "grad_norm": 0.7872330546379089,
+ "learning_rate": 0.00019839776826715614,
+ "loss": 1.7814,
+ "step": 171
+ },
+ {
+ "epoch": 0.17204838860929636,
+ "grad_norm": 0.869532585144043,
+ "learning_rate": 0.00019837902486040978,
+ "loss": 1.7812,
+ "step": 172
+ },
+ {
+ "epoch": 0.1730486699384202,
+ "grad_norm": 1.015028715133667,
+ "learning_rate": 0.0001983601733529069,
+ "loss": 1.9432,
+ "step": 173
+ },
+ {
+ "epoch": 0.174048951267544,
+ "grad_norm": 0.800183117389679,
+ "learning_rate": 0.00019834121376536187,
+ "loss": 1.758,
+ "step": 174
+ },
+ {
+ "epoch": 0.17504923259666783,
+ "grad_norm": 0.7427104711532593,
+ "learning_rate": 0.00019832214611860793,
+ "loss": 1.6476,
+ "step": 175
+ },
+ {
+ "epoch": 0.17604951392579163,
+ "grad_norm": 0.8289130926132202,
+ "learning_rate": 0.00019830297043359692,
+ "loss": 1.7702,
+ "step": 176
+ },
+ {
+ "epoch": 0.17704979525491543,
+ "grad_norm": 0.8298771977424622,
+ "learning_rate": 0.00019828368673139947,
+ "loss": 1.7515,
+ "step": 177
+ },
+ {
+ "epoch": 0.17805007658403926,
+ "grad_norm": 0.7602815628051758,
+ "learning_rate": 0.0001982642950332049,
+ "loss": 1.7545,
+ "step": 178
+ },
+ {
+ "epoch": 0.17905035791316307,
+ "grad_norm": 0.8110321164131165,
+ "learning_rate": 0.00019824479536032112,
+ "loss": 2.2604,
+ "step": 179
+ },
+ {
+ "epoch": 0.1800506392422869,
+ "grad_norm": 0.882273256778717,
+ "learning_rate": 0.0001982251877341748,
+ "loss": 1.8133,
+ "step": 180
+ },
+ {
+ "epoch": 0.1810509205714107,
+ "grad_norm": 0.9015639424324036,
+ "learning_rate": 0.00019820547217631117,
+ "loss": 1.7282,
+ "step": 181
+ },
+ {
+ "epoch": 0.18205120190053453,
+ "grad_norm": 0.9231659173965454,
+ "learning_rate": 0.00019818564870839405,
+ "loss": 1.9094,
+ "step": 182
+ },
+ {
+ "epoch": 0.18305148322965833,
+ "grad_norm": 0.8110967874526978,
+ "learning_rate": 0.00019816571735220583,
+ "loss": 1.886,
+ "step": 183
+ },
+ {
+ "epoch": 0.18405176455878217,
+ "grad_norm": 0.7670036554336548,
+ "learning_rate": 0.00019814567812964748,
+ "loss": 1.9895,
+ "step": 184
+ },
+ {
+ "epoch": 0.18505204588790597,
+ "grad_norm": 0.7955975532531738,
+ "learning_rate": 0.00019812553106273847,
+ "loss": 1.8127,
+ "step": 185
+ },
+ {
+ "epoch": 0.1860523272170298,
+ "grad_norm": 0.8790062665939331,
+ "learning_rate": 0.00019810527617361681,
+ "loss": 1.8899,
+ "step": 186
+ },
+ {
+ "epoch": 0.1870526085461536,
+ "grad_norm": 0.8818586468696594,
+ "learning_rate": 0.00019808491348453894,
+ "loss": 1.7707,
+ "step": 187
+ },
+ {
+ "epoch": 0.18805288987527743,
+ "grad_norm": 0.746442437171936,
+ "learning_rate": 0.00019806444301787978,
+ "loss": 1.7281,
+ "step": 188
+ },
+ {
+ "epoch": 0.18905317120440124,
+ "grad_norm": 0.7786905169487,
+ "learning_rate": 0.0001980438647961327,
+ "loss": 1.7317,
+ "step": 189
+ },
+ {
+ "epoch": 0.19005345253352504,
+ "grad_norm": 0.9338862299919128,
+ "learning_rate": 0.00019802317884190935,
+ "loss": 1.9548,
+ "step": 190
+ },
+ {
+ "epoch": 0.19105373386264887,
+ "grad_norm": 0.7416581511497498,
+ "learning_rate": 0.00019800238517793996,
+ "loss": 1.8601,
+ "step": 191
+ },
+ {
+ "epoch": 0.19205401519177268,
+ "grad_norm": 0.6782898902893066,
+ "learning_rate": 0.00019798148382707296,
+ "loss": 1.8477,
+ "step": 192
+ },
+ {
+ "epoch": 0.1930542965208965,
+ "grad_norm": 0.7389237880706787,
+ "learning_rate": 0.00019796047481227515,
+ "loss": 1.7749,
+ "step": 193
+ },
+ {
+ "epoch": 0.1940545778500203,
+ "grad_norm": 0.9711095094680786,
+ "learning_rate": 0.00019793935815663163,
+ "loss": 2.0899,
+ "step": 194
+ },
+ {
+ "epoch": 0.19505485917914414,
+ "grad_norm": 0.7949391007423401,
+ "learning_rate": 0.00019791813388334581,
+ "loss": 1.8778,
+ "step": 195
+ },
+ {
+ "epoch": 0.19605514050826794,
+ "grad_norm": 0.8871057033538818,
+ "learning_rate": 0.00019789680201573933,
+ "loss": 1.7511,
+ "step": 196
+ },
+ {
+ "epoch": 0.19705542183739178,
+ "grad_norm": 0.8664624094963074,
+ "learning_rate": 0.00019787536257725202,
+ "loss": 1.7232,
+ "step": 197
+ },
+ {
+ "epoch": 0.19805570316651558,
+ "grad_norm": 0.871658980846405,
+ "learning_rate": 0.00019785381559144196,
+ "loss": 1.7987,
+ "step": 198
+ },
+ {
+ "epoch": 0.1990559844956394,
+ "grad_norm": 0.7748361229896545,
+ "learning_rate": 0.00019783216108198542,
+ "loss": 1.9239,
+ "step": 199
+ },
+ {
+ "epoch": 0.2000562658247632,
+ "grad_norm": 0.9393408298492432,
+ "learning_rate": 0.00019781039907267677,
+ "loss": 2.0936,
+ "step": 200
+ },
+ {
+ "epoch": 0.20105654715388704,
+ "grad_norm": 0.8519601225852966,
+ "learning_rate": 0.00019778852958742853,
+ "loss": 1.9108,
+ "step": 201
+ },
+ {
+ "epoch": 0.20205682848301085,
+ "grad_norm": 0.8464863300323486,
+ "learning_rate": 0.00019776655265027127,
+ "loss": 1.897,
+ "step": 202
+ },
+ {
+ "epoch": 0.20305710981213465,
+ "grad_norm": 0.8933351635932922,
+ "learning_rate": 0.00019774446828535371,
+ "loss": 1.8204,
+ "step": 203
+ },
+ {
+ "epoch": 0.20405739114125848,
+ "grad_norm": 0.8305785059928894,
+ "learning_rate": 0.00019772227651694256,
+ "loss": 1.9135,
+ "step": 204
+ },
+ {
+ "epoch": 0.20505767247038229,
+ "grad_norm": 0.8117037415504456,
+ "learning_rate": 0.00019769997736942258,
+ "loss": 1.7585,
+ "step": 205
+ },
+ {
+ "epoch": 0.20605795379950612,
+ "grad_norm": 0.7570348381996155,
+ "learning_rate": 0.00019767757086729647,
+ "loss": 1.8373,
+ "step": 206
+ },
+ {
+ "epoch": 0.20705823512862992,
+ "grad_norm": 0.9291234016418457,
+ "learning_rate": 0.00019765505703518496,
+ "loss": 1.7774,
+ "step": 207
+ },
+ {
+ "epoch": 0.20805851645775375,
+ "grad_norm": 0.8211004137992859,
+ "learning_rate": 0.00019763243589782662,
+ "loss": 1.8766,
+ "step": 208
+ },
+ {
+ "epoch": 0.20905879778687755,
+ "grad_norm": 0.6625431180000305,
+ "learning_rate": 0.00019760970748007803,
+ "loss": 1.628,
+ "step": 209
+ },
+ {
+ "epoch": 0.21005907911600138,
+ "grad_norm": 0.7974782586097717,
+ "learning_rate": 0.0001975868718069136,
+ "loss": 1.6896,
+ "step": 210
+ },
+ {
+ "epoch": 0.2110593604451252,
+ "grad_norm": 0.8364912867546082,
+ "learning_rate": 0.00019756392890342563,
+ "loss": 1.7492,
+ "step": 211
+ },
+ {
+ "epoch": 0.21205964177424902,
+ "grad_norm": 0.8730652332305908,
+ "learning_rate": 0.00019754087879482422,
+ "loss": 1.8295,
+ "step": 212
+ },
+ {
+ "epoch": 0.21305992310337282,
+ "grad_norm": 0.7532863020896912,
+ "learning_rate": 0.00019751772150643722,
+ "loss": 1.8309,
+ "step": 213
+ },
+ {
+ "epoch": 0.21406020443249665,
+ "grad_norm": 0.7375178933143616,
+ "learning_rate": 0.00019749445706371038,
+ "loss": 1.7854,
+ "step": 214
+ },
+ {
+ "epoch": 0.21506048576162046,
+ "grad_norm": 0.7524377703666687,
+ "learning_rate": 0.00019747108549220702,
+ "loss": 1.7683,
+ "step": 215
+ },
+ {
+ "epoch": 0.21606076709074426,
+ "grad_norm": 0.7331809997558594,
+ "learning_rate": 0.00019744760681760832,
+ "loss": 1.7103,
+ "step": 216
+ },
+ {
+ "epoch": 0.2170610484198681,
+ "grad_norm": 0.8083691596984863,
+ "learning_rate": 0.00019742402106571314,
+ "loss": 1.674,
+ "step": 217
+ },
+ {
+ "epoch": 0.2180613297489919,
+ "grad_norm": 0.8524570465087891,
+ "learning_rate": 0.00019740032826243788,
+ "loss": 1.7227,
+ "step": 218
+ },
+ {
+ "epoch": 0.21906161107811573,
+ "grad_norm": 0.7676658630371094,
+ "learning_rate": 0.0001973765284338167,
+ "loss": 1.8561,
+ "step": 219
+ },
+ {
+ "epoch": 0.22006189240723953,
+ "grad_norm": 0.7858710289001465,
+ "learning_rate": 0.00019735262160600127,
+ "loss": 1.7796,
+ "step": 220
+ },
+ {
+ "epoch": 0.22106217373636336,
+ "grad_norm": 0.7587497234344482,
+ "learning_rate": 0.00019732860780526088,
+ "loss": 1.9271,
+ "step": 221
+ },
+ {
+ "epoch": 0.22206245506548716,
+ "grad_norm": 0.8084688186645508,
+ "learning_rate": 0.00019730448705798239,
+ "loss": 1.8176,
+ "step": 222
+ },
+ {
+ "epoch": 0.223062736394611,
+ "grad_norm": 0.6736906170845032,
+ "learning_rate": 0.00019728025939067008,
+ "loss": 1.6288,
+ "step": 223
+ },
+ {
+ "epoch": 0.2240630177237348,
+ "grad_norm": 0.7483925819396973,
+ "learning_rate": 0.00019725592482994583,
+ "loss": 1.8363,
+ "step": 224
+ },
+ {
+ "epoch": 0.22506329905285863,
+ "grad_norm": 1.7995796203613281,
+ "learning_rate": 0.00019723148340254892,
+ "loss": 1.9072,
+ "step": 225
+ },
+ {
+ "epoch": 0.22606358038198243,
+ "grad_norm": 0.8028881549835205,
+ "learning_rate": 0.00019720693513533598,
+ "loss": 1.9021,
+ "step": 226
+ },
+ {
+ "epoch": 0.22706386171110624,
+ "grad_norm": 0.9853909015655518,
+ "learning_rate": 0.00019718228005528122,
+ "loss": 2.0159,
+ "step": 227
+ },
+ {
+ "epoch": 0.22806414304023007,
+ "grad_norm": 0.7784947156906128,
+ "learning_rate": 0.00019715751818947603,
+ "loss": 1.7816,
+ "step": 228
+ },
+ {
+ "epoch": 0.22906442436935387,
+ "grad_norm": 0.7447614669799805,
+ "learning_rate": 0.0001971326495651293,
+ "loss": 1.654,
+ "step": 229
+ },
+ {
+ "epoch": 0.2300647056984777,
+ "grad_norm": 0.8673064112663269,
+ "learning_rate": 0.00019710767420956705,
+ "loss": 2.0049,
+ "step": 230
+ },
+ {
+ "epoch": 0.2310649870276015,
+ "grad_norm": 0.8207747936248779,
+ "learning_rate": 0.0001970825921502328,
+ "loss": 1.9388,
+ "step": 231
+ },
+ {
+ "epoch": 0.23206526835672533,
+ "grad_norm": 0.742266058921814,
+ "learning_rate": 0.0001970574034146871,
+ "loss": 1.7658,
+ "step": 232
+ },
+ {
+ "epoch": 0.23306554968584914,
+ "grad_norm": 0.9097973704338074,
+ "learning_rate": 0.00019703210803060782,
+ "loss": 1.8023,
+ "step": 233
+ },
+ {
+ "epoch": 0.23406583101497297,
+ "grad_norm": 0.7512438297271729,
+ "learning_rate": 0.00019700670602579008,
+ "loss": 1.8551,
+ "step": 234
+ },
+ {
+ "epoch": 0.23506611234409677,
+ "grad_norm": 0.8303943872451782,
+ "learning_rate": 0.00019698119742814606,
+ "loss": 1.7723,
+ "step": 235
+ },
+ {
+ "epoch": 0.2360663936732206,
+ "grad_norm": 0.9195139408111572,
+ "learning_rate": 0.00019695558226570507,
+ "loss": 1.6426,
+ "step": 236
+ },
+ {
+ "epoch": 0.2370666750023444,
+ "grad_norm": 0.7734714150428772,
+ "learning_rate": 0.00019692986056661356,
+ "loss": 1.7798,
+ "step": 237
+ },
+ {
+ "epoch": 0.23806695633146824,
+ "grad_norm": 0.8759648203849792,
+ "learning_rate": 0.00019690403235913504,
+ "loss": 1.6465,
+ "step": 238
+ },
+ {
+ "epoch": 0.23906723766059204,
+ "grad_norm": 0.7688003778457642,
+ "learning_rate": 0.00019687809767165,
+ "loss": 2.0092,
+ "step": 239
+ },
+ {
+ "epoch": 0.24006751898971584,
+ "grad_norm": 0.7398790121078491,
+ "learning_rate": 0.000196852056532656,
+ "loss": 1.8176,
+ "step": 240
+ },
+ {
+ "epoch": 0.24106780031883968,
+ "grad_norm": 0.8921257853507996,
+ "learning_rate": 0.00019682590897076752,
+ "loss": 1.7387,
+ "step": 241
+ },
+ {
+ "epoch": 0.24206808164796348,
+ "grad_norm": 0.7939002513885498,
+ "learning_rate": 0.00019679965501471608,
+ "loss": 1.9417,
+ "step": 242
+ },
+ {
+ "epoch": 0.2430683629770873,
+ "grad_norm": 0.7798025608062744,
+ "learning_rate": 0.0001967732946933499,
+ "loss": 1.7134,
+ "step": 243
+ },
+ {
+ "epoch": 0.2440686443062111,
+ "grad_norm": 0.8007254600524902,
+ "learning_rate": 0.00019674682803563428,
+ "loss": 1.7387,
+ "step": 244
+ },
+ {
+ "epoch": 0.24506892563533494,
+ "grad_norm": 0.6257696151733398,
+ "learning_rate": 0.00019672025507065131,
+ "loss": 1.767,
+ "step": 245
+ },
+ {
+ "epoch": 0.24606920696445875,
+ "grad_norm": 0.7942785620689392,
+ "learning_rate": 0.00019669357582759983,
+ "loss": 1.8801,
+ "step": 246
+ },
+ {
+ "epoch": 0.24706948829358258,
+ "grad_norm": 0.7933829426765442,
+ "learning_rate": 0.00019666679033579552,
+ "loss": 1.9711,
+ "step": 247
+ },
+ {
+ "epoch": 0.24806976962270638,
+ "grad_norm": 0.7489326596260071,
+ "learning_rate": 0.00019663989862467082,
+ "loss": 1.8038,
+ "step": 248
+ },
+ {
+ "epoch": 0.2490700509518302,
+ "grad_norm": 0.7279101014137268,
+ "learning_rate": 0.00019661290072377482,
+ "loss": 1.66,
+ "step": 249
+ },
+ {
+ "epoch": 0.25007033228095404,
+ "grad_norm": 0.6823874115943909,
+ "learning_rate": 0.00019658579666277334,
+ "loss": 1.8064,
+ "step": 250
+ },
+ {
+ "epoch": 0.2510706136100778,
+ "grad_norm": 0.6561273336410522,
+ "learning_rate": 0.0001965585864714488,
+ "loss": 1.6874,
+ "step": 251
+ },
+ {
+ "epoch": 0.25207089493920165,
+ "grad_norm": 0.6457573175430298,
+ "learning_rate": 0.00019653127017970034,
+ "loss": 1.4587,
+ "step": 252
+ },
+ {
+ "epoch": 0.2530711762683255,
+ "grad_norm": 0.7649476528167725,
+ "learning_rate": 0.0001965038478175436,
+ "loss": 1.9811,
+ "step": 253
+ },
+ {
+ "epoch": 0.25407145759744926,
+ "grad_norm": 0.8786829710006714,
+ "learning_rate": 0.00019647631941511082,
+ "loss": 1.8629,
+ "step": 254
+ },
+ {
+ "epoch": 0.2550717389265731,
+ "grad_norm": 0.7038159966468811,
+ "learning_rate": 0.0001964486850026507,
+ "loss": 1.6885,
+ "step": 255
+ },
+ {
+ "epoch": 0.2560720202556969,
+ "grad_norm": 0.7255909442901611,
+ "learning_rate": 0.00019642094461052852,
+ "loss": 1.7335,
+ "step": 256
+ },
+ {
+ "epoch": 0.25707230158482075,
+ "grad_norm": 0.7780727744102478,
+ "learning_rate": 0.00019639309826922585,
+ "loss": 1.899,
+ "step": 257
+ },
+ {
+ "epoch": 0.2580725829139445,
+ "grad_norm": 0.8533650040626526,
+ "learning_rate": 0.0001963651460093409,
+ "loss": 1.7711,
+ "step": 258
+ },
+ {
+ "epoch": 0.25907286424306836,
+ "grad_norm": 0.6440068483352661,
+ "learning_rate": 0.00019633708786158806,
+ "loss": 1.6685,
+ "step": 259
+ },
+ {
+ "epoch": 0.2600731455721922,
+ "grad_norm": 0.6873877048492432,
+ "learning_rate": 0.00019630892385679818,
+ "loss": 1.7502,
+ "step": 260
+ },
+ {
+ "epoch": 0.261073426901316,
+ "grad_norm": 0.7100672721862793,
+ "learning_rate": 0.00019628065402591845,
+ "loss": 1.7789,
+ "step": 261
+ },
+ {
+ "epoch": 0.2620737082304398,
+ "grad_norm": 0.8447420001029968,
+ "learning_rate": 0.00019625227840001225,
+ "loss": 1.8577,
+ "step": 262
+ },
+ {
+ "epoch": 0.2630739895595636,
+ "grad_norm": 0.767888605594635,
+ "learning_rate": 0.0001962237970102593,
+ "loss": 1.5936,
+ "step": 263
+ },
+ {
+ "epoch": 0.26407427088868746,
+ "grad_norm": 0.6955805420875549,
+ "learning_rate": 0.0001961952098879555,
+ "loss": 1.7733,
+ "step": 264
+ },
+ {
+ "epoch": 0.26507455221781123,
+ "grad_norm": 0.777740478515625,
+ "learning_rate": 0.00019616651706451287,
+ "loss": 1.6027,
+ "step": 265
+ },
+ {
+ "epoch": 0.26607483354693506,
+ "grad_norm": 0.7691099047660828,
+ "learning_rate": 0.0001961377185714597,
+ "loss": 1.7457,
+ "step": 266
+ },
+ {
+ "epoch": 0.2670751148760589,
+ "grad_norm": 0.6778420805931091,
+ "learning_rate": 0.0001961088144404403,
+ "loss": 1.7704,
+ "step": 267
+ },
+ {
+ "epoch": 0.2680753962051827,
+ "grad_norm": 0.7943267226219177,
+ "learning_rate": 0.00019607980470321505,
+ "loss": 1.9775,
+ "step": 268
+ },
+ {
+ "epoch": 0.2690756775343065,
+ "grad_norm": 0.6660135388374329,
+ "learning_rate": 0.00019605068939166045,
+ "loss": 1.6556,
+ "step": 269
+ },
+ {
+ "epoch": 0.27007595886343033,
+ "grad_norm": 0.8664935827255249,
+ "learning_rate": 0.00019602146853776894,
+ "loss": 2.03,
+ "step": 270
+ },
+ {
+ "epoch": 0.27107624019255416,
+ "grad_norm": 0.7783074975013733,
+ "learning_rate": 0.000195992142173649,
+ "loss": 1.7426,
+ "step": 271
+ },
+ {
+ "epoch": 0.272076521521678,
+ "grad_norm": 0.7470223903656006,
+ "learning_rate": 0.0001959627103315249,
+ "loss": 1.7284,
+ "step": 272
+ },
+ {
+ "epoch": 0.27307680285080177,
+ "grad_norm": 0.7284931540489197,
+ "learning_rate": 0.00019593317304373705,
+ "loss": 1.6977,
+ "step": 273
+ },
+ {
+ "epoch": 0.2740770841799256,
+ "grad_norm": 0.7201762795448303,
+ "learning_rate": 0.00019590353034274144,
+ "loss": 1.7184,
+ "step": 274
+ },
+ {
+ "epoch": 0.27507736550904943,
+ "grad_norm": 0.6756151914596558,
+ "learning_rate": 0.00019587378226111014,
+ "loss": 1.7276,
+ "step": 275
+ },
+ {
+ "epoch": 0.27607764683817326,
+ "grad_norm": 0.6784201860427856,
+ "learning_rate": 0.00019584392883153088,
+ "loss": 1.642,
+ "step": 276
+ },
+ {
+ "epoch": 0.27707792816729704,
+ "grad_norm": 0.7387176752090454,
+ "learning_rate": 0.00019581397008680717,
+ "loss": 1.7911,
+ "step": 277
+ },
+ {
+ "epoch": 0.27807820949642087,
+ "grad_norm": 0.9367021918296814,
+ "learning_rate": 0.00019578390605985826,
+ "loss": 2.0034,
+ "step": 278
+ },
+ {
+ "epoch": 0.2790784908255447,
+ "grad_norm": 0.803698718547821,
+ "learning_rate": 0.00019575373678371909,
+ "loss": 1.7907,
+ "step": 279
+ },
+ {
+ "epoch": 0.2800787721546685,
+ "grad_norm": 0.7324479818344116,
+ "learning_rate": 0.00019572346229154025,
+ "loss": 1.5539,
+ "step": 280
+ },
+ {
+ "epoch": 0.2810790534837923,
+ "grad_norm": 0.7107382416725159,
+ "learning_rate": 0.00019569308261658787,
+ "loss": 1.838,
+ "step": 281
+ },
+ {
+ "epoch": 0.28207933481291614,
+ "grad_norm": 0.8698626756668091,
+ "learning_rate": 0.00019566259779224378,
+ "loss": 1.7433,
+ "step": 282
+ },
+ {
+ "epoch": 0.28307961614203997,
+ "grad_norm": 0.7804028391838074,
+ "learning_rate": 0.00019563200785200526,
+ "loss": 1.7161,
+ "step": 283
+ },
+ {
+ "epoch": 0.28407989747116374,
+ "grad_norm": 0.8762909173965454,
+ "learning_rate": 0.00019560131282948516,
+ "loss": 1.8031,
+ "step": 284
+ },
+ {
+ "epoch": 0.2850801788002876,
+ "grad_norm": 0.8252436518669128,
+ "learning_rate": 0.0001955705127584117,
+ "loss": 1.6434,
+ "step": 285
+ },
+ {
+ "epoch": 0.2860804601294114,
+ "grad_norm": 0.8220797181129456,
+ "learning_rate": 0.00019553960767262863,
+ "loss": 1.8522,
+ "step": 286
+ },
+ {
+ "epoch": 0.28708074145853524,
+ "grad_norm": 0.7883003950119019,
+ "learning_rate": 0.00019550859760609503,
+ "loss": 1.8245,
+ "step": 287
+ },
+ {
+ "epoch": 0.288081022787659,
+ "grad_norm": 0.9208703637123108,
+ "learning_rate": 0.00019547748259288536,
+ "loss": 1.8877,
+ "step": 288
+ },
+ {
+ "epoch": 0.28908130411678284,
+ "grad_norm": 0.8452202677726746,
+ "learning_rate": 0.0001954462626671894,
+ "loss": 1.554,
+ "step": 289
+ },
+ {
+ "epoch": 0.2900815854459067,
+ "grad_norm": 0.82865971326828,
+ "learning_rate": 0.0001954149378633122,
+ "loss": 1.655,
+ "step": 290
+ },
+ {
+ "epoch": 0.29108186677503045,
+ "grad_norm": 0.7871205806732178,
+ "learning_rate": 0.00019538350821567404,
+ "loss": 1.621,
+ "step": 291
+ },
+ {
+ "epoch": 0.2920821481041543,
+ "grad_norm": 0.8288848996162415,
+ "learning_rate": 0.00019535197375881045,
+ "loss": 1.9277,
+ "step": 292
+ },
+ {
+ "epoch": 0.2930824294332781,
+ "grad_norm": 0.7275516986846924,
+ "learning_rate": 0.00019532033452737205,
+ "loss": 1.7949,
+ "step": 293
+ },
+ {
+ "epoch": 0.29408271076240194,
+ "grad_norm": 0.7424570322036743,
+ "learning_rate": 0.00019528859055612468,
+ "loss": 1.6407,
+ "step": 294
+ },
+ {
+ "epoch": 0.2950829920915257,
+ "grad_norm": 0.7031363248825073,
+ "learning_rate": 0.0001952567418799492,
+ "loss": 1.8793,
+ "step": 295
+ },
+ {
+ "epoch": 0.29608327342064955,
+ "grad_norm": 0.7190185189247131,
+ "learning_rate": 0.00019522478853384155,
+ "loss": 1.6759,
+ "step": 296
+ },
+ {
+ "epoch": 0.2970835547497734,
+ "grad_norm": 0.7270736694335938,
+ "learning_rate": 0.00019519273055291266,
+ "loss": 1.6351,
+ "step": 297
+ },
+ {
+ "epoch": 0.2980838360788972,
+ "grad_norm": 0.8894152641296387,
+ "learning_rate": 0.00019516056797238846,
+ "loss": 1.7908,
+ "step": 298
+ },
+ {
+ "epoch": 0.299084117408021,
+ "grad_norm": 0.9089106321334839,
+ "learning_rate": 0.00019512830082760987,
+ "loss": 1.6018,
+ "step": 299
+ },
+ {
+ "epoch": 0.3000843987371448,
+ "grad_norm": 0.8772429823875427,
+ "learning_rate": 0.00019509592915403255,
+ "loss": 1.8474,
+ "step": 300
+ },
+ {
+ "epoch": 0.30108468006626865,
+ "grad_norm": 0.8244933485984802,
+ "learning_rate": 0.00019506345298722717,
+ "loss": 1.4324,
+ "step": 301
+ },
+ {
+ "epoch": 0.3020849613953925,
+ "grad_norm": 0.7283012866973877,
+ "learning_rate": 0.00019503087236287913,
+ "loss": 1.5115,
+ "step": 302
+ },
+ {
+ "epoch": 0.30308524272451626,
+ "grad_norm": 0.7721333503723145,
+ "learning_rate": 0.00019499818731678873,
+ "loss": 1.6728,
+ "step": 303
+ },
+ {
+ "epoch": 0.3040855240536401,
+ "grad_norm": 0.7579306960105896,
+ "learning_rate": 0.00019496539788487082,
+ "loss": 1.5927,
+ "step": 304
+ },
+ {
+ "epoch": 0.3050858053827639,
+ "grad_norm": 0.9054704308509827,
+ "learning_rate": 0.0001949325041031551,
+ "loss": 1.9027,
+ "step": 305
+ },
+ {
+ "epoch": 0.3060860867118877,
+ "grad_norm": 0.7023262977600098,
+ "learning_rate": 0.0001948995060077859,
+ "loss": 1.7705,
+ "step": 306
+ },
+ {
+ "epoch": 0.3070863680410115,
+ "grad_norm": 0.7942065000534058,
+ "learning_rate": 0.0001948664036350221,
+ "loss": 1.8269,
+ "step": 307
+ },
+ {
+ "epoch": 0.30808664937013536,
+ "grad_norm": 0.9305068850517273,
+ "learning_rate": 0.00019483319702123732,
+ "loss": 1.8247,
+ "step": 308
+ },
+ {
+ "epoch": 0.3090869306992592,
+ "grad_norm": 0.814664900302887,
+ "learning_rate": 0.00019479988620291956,
+ "loss": 1.9179,
+ "step": 309
+ },
+ {
+ "epoch": 0.31008721202838296,
+ "grad_norm": 0.6418014764785767,
+ "learning_rate": 0.00019476647121667137,
+ "loss": 1.5011,
+ "step": 310
+ },
+ {
+ "epoch": 0.3110874933575068,
+ "grad_norm": 0.7911447882652283,
+ "learning_rate": 0.00019473295209920983,
+ "loss": 1.857,
+ "step": 311
+ },
+ {
+ "epoch": 0.3120877746866306,
+ "grad_norm": 0.7792949676513672,
+ "learning_rate": 0.00019469932888736632,
+ "loss": 1.7279,
+ "step": 312
+ },
+ {
+ "epoch": 0.31308805601575446,
+ "grad_norm": 0.7579171657562256,
+ "learning_rate": 0.00019466560161808674,
+ "loss": 1.6902,
+ "step": 313
+ },
+ {
+ "epoch": 0.31408833734487823,
+ "grad_norm": 0.7052372694015503,
+ "learning_rate": 0.00019463177032843124,
+ "loss": 1.7302,
+ "step": 314
+ },
+ {
+ "epoch": 0.31508861867400206,
+ "grad_norm": 0.7188624143600464,
+ "learning_rate": 0.00019459783505557424,
+ "loss": 1.7338,
+ "step": 315
+ },
+ {
+ "epoch": 0.3160889000031259,
+ "grad_norm": 0.6057978272438049,
+ "learning_rate": 0.00019456379583680452,
+ "loss": 1.6123,
+ "step": 316
+ },
+ {
+ "epoch": 0.31708918133224967,
+ "grad_norm": 0.8339365720748901,
+ "learning_rate": 0.000194529652709525,
+ "loss": 1.9765,
+ "step": 317
+ },
+ {
+ "epoch": 0.3180894626613735,
+ "grad_norm": 0.8524260520935059,
+ "learning_rate": 0.00019449540571125286,
+ "loss": 1.6803,
+ "step": 318
+ },
+ {
+ "epoch": 0.31908974399049733,
+ "grad_norm": 0.7035975456237793,
+ "learning_rate": 0.00019446105487961926,
+ "loss": 1.5792,
+ "step": 319
+ },
+ {
+ "epoch": 0.32009002531962116,
+ "grad_norm": 0.7894249558448792,
+ "learning_rate": 0.0001944266002523696,
+ "loss": 1.6326,
+ "step": 320
+ },
+ {
+ "epoch": 0.32109030664874494,
+ "grad_norm": 0.7716989517211914,
+ "learning_rate": 0.0001943920418673633,
+ "loss": 1.6871,
+ "step": 321
+ },
+ {
+ "epoch": 0.32209058797786877,
+ "grad_norm": 0.7914933562278748,
+ "learning_rate": 0.00019435737976257377,
+ "loss": 1.7148,
+ "step": 322
+ },
+ {
+ "epoch": 0.3230908693069926,
+ "grad_norm": 0.7113205790519714,
+ "learning_rate": 0.00019432261397608834,
+ "loss": 1.5236,
+ "step": 323
+ },
+ {
+ "epoch": 0.32409115063611643,
+ "grad_norm": 0.8609917163848877,
+ "learning_rate": 0.00019428774454610843,
+ "loss": 1.8101,
+ "step": 324
+ },
+ {
+ "epoch": 0.3250914319652402,
+ "grad_norm": 0.7319685220718384,
+ "learning_rate": 0.00019425277151094913,
+ "loss": 1.7712,
+ "step": 325
+ },
+ {
+ "epoch": 0.32609171329436404,
+ "grad_norm": 0.6478747725486755,
+ "learning_rate": 0.00019421769490903957,
+ "loss": 1.8535,
+ "step": 326
+ },
+ {
+ "epoch": 0.32709199462348787,
+ "grad_norm": 0.7025763392448425,
+ "learning_rate": 0.0001941825147789225,
+ "loss": 1.9213,
+ "step": 327
+ },
+ {
+ "epoch": 0.32809227595261165,
+ "grad_norm": 0.7595239877700806,
+ "learning_rate": 0.00019414723115925456,
+ "loss": 1.7449,
+ "step": 328
+ },
+ {
+ "epoch": 0.3290925572817355,
+ "grad_norm": 0.7728105783462524,
+ "learning_rate": 0.0001941118440888061,
+ "loss": 1.8821,
+ "step": 329
+ },
+ {
+ "epoch": 0.3300928386108593,
+ "grad_norm": 0.7430977821350098,
+ "learning_rate": 0.0001940763536064611,
+ "loss": 1.6904,
+ "step": 330
+ },
+ {
+ "epoch": 0.33109311993998314,
+ "grad_norm": 0.7909367680549622,
+ "learning_rate": 0.00019404075975121716,
+ "loss": 1.7899,
+ "step": 331
+ },
+ {
+ "epoch": 0.3320934012691069,
+ "grad_norm": 0.7561226487159729,
+ "learning_rate": 0.0001940050625621855,
+ "loss": 1.7746,
+ "step": 332
+ },
+ {
+ "epoch": 0.33309368259823074,
+ "grad_norm": 0.7602452635765076,
+ "learning_rate": 0.00019396926207859084,
+ "loss": 1.7909,
+ "step": 333
+ },
+ {
+ "epoch": 0.3340939639273546,
+ "grad_norm": 0.8194379806518555,
+ "learning_rate": 0.0001939333583397715,
+ "loss": 1.7039,
+ "step": 334
+ },
+ {
+ "epoch": 0.3350942452564784,
+ "grad_norm": 0.7036342024803162,
+ "learning_rate": 0.00019389735138517915,
+ "loss": 1.6663,
+ "step": 335
+ },
+ {
+ "epoch": 0.3360945265856022,
+ "grad_norm": 0.8429521918296814,
+ "learning_rate": 0.00019386124125437895,
+ "loss": 1.589,
+ "step": 336
+ },
+ {
+ "epoch": 0.337094807914726,
+ "grad_norm": 0.7271071076393127,
+ "learning_rate": 0.00019382502798704935,
+ "loss": 1.646,
+ "step": 337
+ },
+ {
+ "epoch": 0.33809508924384984,
+ "grad_norm": 0.7862086892127991,
+ "learning_rate": 0.00019378871162298227,
+ "loss": 1.6085,
+ "step": 338
+ },
+ {
+ "epoch": 0.3390953705729737,
+ "grad_norm": 0.676815390586853,
+ "learning_rate": 0.00019375229220208276,
+ "loss": 1.7335,
+ "step": 339
+ },
+ {
+ "epoch": 0.34009565190209745,
+ "grad_norm": 0.8916042447090149,
+ "learning_rate": 0.00019371576976436917,
+ "loss": 1.7914,
+ "step": 340
+ },
+ {
+ "epoch": 0.3410959332312213,
+ "grad_norm": 0.7913751006126404,
+ "learning_rate": 0.00019367914434997312,
+ "loss": 1.6031,
+ "step": 341
+ },
+ {
+ "epoch": 0.3420962145603451,
+ "grad_norm": 0.7409866452217102,
+ "learning_rate": 0.00019364241599913924,
+ "loss": 1.6525,
+ "step": 342
+ },
+ {
+ "epoch": 0.3430964958894689,
+ "grad_norm": 0.7472705841064453,
+ "learning_rate": 0.0001936055847522254,
+ "loss": 1.6716,
+ "step": 343
+ },
+ {
+ "epoch": 0.3440967772185927,
+ "grad_norm": 0.7030773758888245,
+ "learning_rate": 0.00019356865064970244,
+ "loss": 1.7134,
+ "step": 344
+ },
+ {
+ "epoch": 0.34509705854771655,
+ "grad_norm": 0.6609564423561096,
+ "learning_rate": 0.0001935316137321543,
+ "loss": 1.7127,
+ "step": 345
+ },
+ {
+ "epoch": 0.3460973398768404,
+ "grad_norm": 0.7811393141746521,
+ "learning_rate": 0.00019349447404027782,
+ "loss": 1.75,
+ "step": 346
+ },
+ {
+ "epoch": 0.34709762120596416,
+ "grad_norm": 0.6980521082878113,
+ "learning_rate": 0.00019345723161488283,
+ "loss": 1.82,
+ "step": 347
+ },
+ {
+ "epoch": 0.348097902535088,
+ "grad_norm": 0.749796986579895,
+ "learning_rate": 0.000193419886496892,
+ "loss": 1.9755,
+ "step": 348
+ },
+ {
+ "epoch": 0.3490981838642118,
+ "grad_norm": 0.9486667513847351,
+ "learning_rate": 0.00019338243872734086,
+ "loss": 1.7047,
+ "step": 349
+ },
+ {
+ "epoch": 0.35009846519333565,
+ "grad_norm": 0.8086081147193909,
+ "learning_rate": 0.00019334488834737775,
+ "loss": 1.661,
+ "step": 350
+ },
+ {
+ "epoch": 0.3510987465224594,
+ "grad_norm": 0.700549840927124,
+ "learning_rate": 0.00019330723539826375,
+ "loss": 1.8696,
+ "step": 351
+ },
+ {
+ "epoch": 0.35209902785158326,
+ "grad_norm": 0.7465476393699646,
+ "learning_rate": 0.00019326947992137262,
+ "loss": 1.5444,
+ "step": 352
+ },
+ {
+ "epoch": 0.3530993091807071,
+ "grad_norm": 0.7370999455451965,
+ "learning_rate": 0.00019323162195819082,
+ "loss": 1.8805,
+ "step": 353
+ },
+ {
+ "epoch": 0.35409959050983086,
+ "grad_norm": 0.719359278678894,
+ "learning_rate": 0.0001931936615503174,
+ "loss": 1.8022,
+ "step": 354
+ },
+ {
+ "epoch": 0.3550998718389547,
+ "grad_norm": 0.7301434278488159,
+ "learning_rate": 0.000193155598739464,
+ "loss": 1.6984,
+ "step": 355
+ },
+ {
+ "epoch": 0.3561001531680785,
+ "grad_norm": 0.7191399335861206,
+ "learning_rate": 0.0001931174335674547,
+ "loss": 1.7229,
+ "step": 356
+ },
+ {
+ "epoch": 0.35710043449720236,
+ "grad_norm": 0.7471932768821716,
+ "learning_rate": 0.0001930791660762262,
+ "loss": 1.7408,
+ "step": 357
+ },
+ {
+ "epoch": 0.35810071582632613,
+ "grad_norm": 0.8197934031486511,
+ "learning_rate": 0.00019304079630782752,
+ "loss": 1.6938,
+ "step": 358
+ },
+ {
+ "epoch": 0.35910099715544996,
+ "grad_norm": 0.7408166527748108,
+ "learning_rate": 0.0001930023243044201,
+ "loss": 1.7798,
+ "step": 359
+ },
+ {
+ "epoch": 0.3601012784845738,
+ "grad_norm": 0.7525373101234436,
+ "learning_rate": 0.00019296375010827773,
+ "loss": 1.711,
+ "step": 360
+ },
+ {
+ "epoch": 0.3611015598136976,
+ "grad_norm": 0.6712046265602112,
+ "learning_rate": 0.00019292507376178643,
+ "loss": 1.8157,
+ "step": 361
+ },
+ {
+ "epoch": 0.3621018411428214,
+ "grad_norm": 0.6712916493415833,
+ "learning_rate": 0.00019288629530744454,
+ "loss": 1.8707,
+ "step": 362
+ },
+ {
+ "epoch": 0.36310212247194523,
+ "grad_norm": 0.6127772331237793,
+ "learning_rate": 0.0001928474147878626,
+ "loss": 1.4743,
+ "step": 363
+ },
+ {
+ "epoch": 0.36410240380106906,
+ "grad_norm": 0.910310685634613,
+ "learning_rate": 0.0001928084322457632,
+ "loss": 1.7956,
+ "step": 364
+ },
+ {
+ "epoch": 0.3651026851301929,
+ "grad_norm": 0.6267688870429993,
+ "learning_rate": 0.00019276934772398114,
+ "loss": 1.4664,
+ "step": 365
+ },
+ {
+ "epoch": 0.36610296645931667,
+ "grad_norm": 0.8317943811416626,
+ "learning_rate": 0.00019273016126546323,
+ "loss": 1.853,
+ "step": 366
+ },
+ {
+ "epoch": 0.3671032477884405,
+ "grad_norm": 0.7581344842910767,
+ "learning_rate": 0.00019269087291326833,
+ "loss": 1.9236,
+ "step": 367
+ },
+ {
+ "epoch": 0.36810352911756433,
+ "grad_norm": 0.9311390519142151,
+ "learning_rate": 0.00019265148271056722,
+ "loss": 1.7019,
+ "step": 368
+ },
+ {
+ "epoch": 0.3691038104466881,
+ "grad_norm": 0.9513958096504211,
+ "learning_rate": 0.0001926119907006426,
+ "loss": 1.7617,
+ "step": 369
+ },
+ {
+ "epoch": 0.37010409177581194,
+ "grad_norm": 0.7407613396644592,
+ "learning_rate": 0.00019257239692688907,
+ "loss": 2.1057,
+ "step": 370
+ },
+ {
+ "epoch": 0.37110437310493577,
+ "grad_norm": 0.7530227899551392,
+ "learning_rate": 0.00019253270143281296,
+ "loss": 1.9844,
+ "step": 371
+ },
+ {
+ "epoch": 0.3721046544340596,
+ "grad_norm": 0.6733037233352661,
+ "learning_rate": 0.00019249290426203252,
+ "loss": 1.8939,
+ "step": 372
+ },
+ {
+ "epoch": 0.3731049357631834,
+ "grad_norm": 0.7037007808685303,
+ "learning_rate": 0.0001924530054582776,
+ "loss": 1.6897,
+ "step": 373
+ },
+ {
+ "epoch": 0.3741052170923072,
+ "grad_norm": 0.7689145803451538,
+ "learning_rate": 0.0001924130050653898,
+ "loss": 1.6544,
+ "step": 374
+ },
+ {
+ "epoch": 0.37510549842143104,
+ "grad_norm": 0.7100968360900879,
+ "learning_rate": 0.00019237290312732226,
+ "loss": 1.7932,
+ "step": 375
+ },
+ {
+ "epoch": 0.37610577975055487,
+ "grad_norm": 0.7645193338394165,
+ "learning_rate": 0.00019233269968813984,
+ "loss": 1.6576,
+ "step": 376
+ },
+ {
+ "epoch": 0.37710606107967864,
+ "grad_norm": 0.6831678152084351,
+ "learning_rate": 0.00019229239479201876,
+ "loss": 1.6675,
+ "step": 377
+ },
+ {
+ "epoch": 0.3781063424088025,
+ "grad_norm": 0.8677794337272644,
+ "learning_rate": 0.0001922519884832469,
+ "loss": 1.6064,
+ "step": 378
+ },
+ {
+ "epoch": 0.3791066237379263,
+ "grad_norm": 0.6727691888809204,
+ "learning_rate": 0.0001922114808062234,
+ "loss": 1.6249,
+ "step": 379
+ },
+ {
+ "epoch": 0.3801069050670501,
+ "grad_norm": 0.6588670611381531,
+ "learning_rate": 0.00019217087180545893,
+ "loss": 1.5541,
+ "step": 380
+ },
+ {
+ "epoch": 0.3811071863961739,
+ "grad_norm": 0.7666369080543518,
+ "learning_rate": 0.0001921301615255754,
+ "loss": 1.7806,
+ "step": 381
+ },
+ {
+ "epoch": 0.38210746772529774,
+ "grad_norm": 0.6465156078338623,
+ "learning_rate": 0.0001920893500113061,
+ "loss": 1.512,
+ "step": 382
+ },
+ {
+ "epoch": 0.3831077490544216,
+ "grad_norm": 0.7854346632957458,
+ "learning_rate": 0.00019204843730749547,
+ "loss": 1.6857,
+ "step": 383
+ },
+ {
+ "epoch": 0.38410803038354535,
+ "grad_norm": 0.6625111103057861,
+ "learning_rate": 0.00019200742345909915,
+ "loss": 1.7033,
+ "step": 384
+ },
+ {
+ "epoch": 0.3851083117126692,
+ "grad_norm": 0.7273709177970886,
+ "learning_rate": 0.00019196630851118398,
+ "loss": 1.665,
+ "step": 385
+ },
+ {
+ "epoch": 0.386108593041793,
+ "grad_norm": 0.6861465573310852,
+ "learning_rate": 0.0001919250925089278,
+ "loss": 1.5028,
+ "step": 386
+ },
+ {
+ "epoch": 0.38710887437091684,
+ "grad_norm": 0.845456063747406,
+ "learning_rate": 0.00019188377549761963,
+ "loss": 1.967,
+ "step": 387
+ },
+ {
+ "epoch": 0.3881091557000406,
+ "grad_norm": 0.6481165289878845,
+ "learning_rate": 0.00019184235752265928,
+ "loss": 1.6053,
+ "step": 388
+ },
+ {
+ "epoch": 0.38910943702916445,
+ "grad_norm": 0.6312947273254395,
+ "learning_rate": 0.00019180083862955772,
+ "loss": 1.4427,
+ "step": 389
+ },
+ {
+ "epoch": 0.3901097183582883,
+ "grad_norm": 0.7874154448509216,
+ "learning_rate": 0.00019175921886393666,
+ "loss": 1.6099,
+ "step": 390
+ },
+ {
+ "epoch": 0.39110999968741206,
+ "grad_norm": 0.6839481592178345,
+ "learning_rate": 0.00019171749827152869,
+ "loss": 1.7004,
+ "step": 391
+ },
+ {
+ "epoch": 0.3921102810165359,
+ "grad_norm": 0.7239277362823486,
+ "learning_rate": 0.0001916756768981772,
+ "loss": 1.8813,
+ "step": 392
+ },
+ {
+ "epoch": 0.3931105623456597,
+ "grad_norm": 0.8241100311279297,
+ "learning_rate": 0.00019163375478983632,
+ "loss": 1.9443,
+ "step": 393
+ },
+ {
+ "epoch": 0.39411084367478355,
+ "grad_norm": 0.7401999235153198,
+ "learning_rate": 0.00019159173199257085,
+ "loss": 1.6663,
+ "step": 394
+ },
+ {
+ "epoch": 0.3951111250039073,
+ "grad_norm": 0.8297036290168762,
+ "learning_rate": 0.00019154960855255628,
+ "loss": 1.8012,
+ "step": 395
+ },
+ {
+ "epoch": 0.39611140633303116,
+ "grad_norm": 0.9661216735839844,
+ "learning_rate": 0.0001915073845160786,
+ "loss": 1.7007,
+ "step": 396
+ },
+ {
+ "epoch": 0.397111687662155,
+ "grad_norm": 1.4041926860809326,
+ "learning_rate": 0.00019146505992953446,
+ "loss": 1.7232,
+ "step": 397
+ },
+ {
+ "epoch": 0.3981119689912788,
+ "grad_norm": 0.8469036221504211,
+ "learning_rate": 0.00019142263483943085,
+ "loss": 1.4479,
+ "step": 398
+ },
+ {
+ "epoch": 0.3991122503204026,
+ "grad_norm": 0.9476561546325684,
+ "learning_rate": 0.00019138010929238534,
+ "loss": 1.8572,
+ "step": 399
+ },
+ {
+ "epoch": 0.4001125316495264,
+ "grad_norm": 0.7196705937385559,
+ "learning_rate": 0.00019133748333512575,
+ "loss": 1.6184,
+ "step": 400
+ },
+ {
+ "epoch": 0.40111281297865026,
+ "grad_norm": 0.8957480192184448,
+ "learning_rate": 0.00019129475701449035,
+ "loss": 1.762,
+ "step": 401
+ },
+ {
+ "epoch": 0.4021130943077741,
+ "grad_norm": 0.7850635647773743,
+ "learning_rate": 0.0001912519303774276,
+ "loss": 1.6764,
+ "step": 402
+ },
+ {
+ "epoch": 0.40311337563689786,
+ "grad_norm": 0.7579814791679382,
+ "learning_rate": 0.0001912090034709963,
+ "loss": 1.6231,
+ "step": 403
+ },
+ {
+ "epoch": 0.4041136569660217,
+ "grad_norm": 0.7173107266426086,
+ "learning_rate": 0.00019116597634236525,
+ "loss": 1.7107,
+ "step": 404
+ },
+ {
+ "epoch": 0.4051139382951455,
+ "grad_norm": 0.7832950353622437,
+ "learning_rate": 0.0001911228490388136,
+ "loss": 1.8608,
+ "step": 405
+ },
+ {
+ "epoch": 0.4061142196242693,
+ "grad_norm": 0.716299295425415,
+ "learning_rate": 0.00019107962160773035,
+ "loss": 1.652,
+ "step": 406
+ },
+ {
+ "epoch": 0.40711450095339313,
+ "grad_norm": 0.6675253510475159,
+ "learning_rate": 0.0001910362940966147,
+ "loss": 1.5963,
+ "step": 407
+ },
+ {
+ "epoch": 0.40811478228251696,
+ "grad_norm": 0.6555336713790894,
+ "learning_rate": 0.00019099286655307568,
+ "loss": 1.4991,
+ "step": 408
+ },
+ {
+ "epoch": 0.4091150636116408,
+ "grad_norm": 0.7307867407798767,
+ "learning_rate": 0.0001909493390248324,
+ "loss": 1.8221,
+ "step": 409
+ },
+ {
+ "epoch": 0.41011534494076457,
+ "grad_norm": 0.6557430624961853,
+ "learning_rate": 0.00019090571155971366,
+ "loss": 1.6484,
+ "step": 410
+ },
+ {
+ "epoch": 0.4111156262698884,
+ "grad_norm": 0.6816605925559998,
+ "learning_rate": 0.00019086198420565823,
+ "loss": 1.5052,
+ "step": 411
+ },
+ {
+ "epoch": 0.41211590759901223,
+ "grad_norm": 0.66513592004776,
+ "learning_rate": 0.00019081815701071445,
+ "loss": 1.8818,
+ "step": 412
+ },
+ {
+ "epoch": 0.41311618892813606,
+ "grad_norm": 0.6807469129562378,
+ "learning_rate": 0.0001907742300230406,
+ "loss": 1.5997,
+ "step": 413
+ },
+ {
+ "epoch": 0.41411647025725984,
+ "grad_norm": 0.8060654401779175,
+ "learning_rate": 0.00019073020329090444,
+ "loss": 1.8099,
+ "step": 414
+ },
+ {
+ "epoch": 0.41511675158638367,
+ "grad_norm": 0.7440110445022583,
+ "learning_rate": 0.0001906860768626834,
+ "loss": 1.4876,
+ "step": 415
+ },
+ {
+ "epoch": 0.4161170329155075,
+ "grad_norm": 0.7675415277481079,
+ "learning_rate": 0.00019064185078686443,
+ "loss": 1.4722,
+ "step": 416
+ },
+ {
+ "epoch": 0.4171173142446313,
+ "grad_norm": 0.6656553149223328,
+ "learning_rate": 0.000190597525112044,
+ "loss": 1.6453,
+ "step": 417
+ },
+ {
+ "epoch": 0.4181175955737551,
+ "grad_norm": 0.730689287185669,
+ "learning_rate": 0.000190553099886928,
+ "loss": 1.6584,
+ "step": 418
+ },
+ {
+ "epoch": 0.41911787690287894,
+ "grad_norm": 0.8425858616828918,
+ "learning_rate": 0.00019050857516033173,
+ "loss": 1.6249,
+ "step": 419
+ },
+ {
+ "epoch": 0.42011815823200277,
+ "grad_norm": 0.7816892266273499,
+ "learning_rate": 0.00019046395098117983,
+ "loss": 1.7532,
+ "step": 420
+ },
+ {
+ "epoch": 0.42111843956112655,
+ "grad_norm": 0.7324026823043823,
+ "learning_rate": 0.00019041922739850616,
+ "loss": 1.8523,
+ "step": 421
+ },
+ {
+ "epoch": 0.4221187208902504,
+ "grad_norm": 0.7473389506340027,
+ "learning_rate": 0.00019037440446145385,
+ "loss": 1.583,
+ "step": 422
+ },
+ {
+ "epoch": 0.4231190022193742,
+ "grad_norm": 0.8720895051956177,
+ "learning_rate": 0.00019032948221927524,
+ "loss": 1.6806,
+ "step": 423
+ },
+ {
+ "epoch": 0.42411928354849804,
+ "grad_norm": 0.728528618812561,
+ "learning_rate": 0.00019028446072133175,
+ "loss": 1.7283,
+ "step": 424
+ },
+ {
+ "epoch": 0.4251195648776218,
+ "grad_norm": 0.739930272102356,
+ "learning_rate": 0.00019023934001709383,
+ "loss": 1.7244,
+ "step": 425
+ },
+ {
+ "epoch": 0.42611984620674564,
+ "grad_norm": 0.7825399041175842,
+ "learning_rate": 0.00019019412015614098,
+ "loss": 1.7871,
+ "step": 426
+ },
+ {
+ "epoch": 0.4271201275358695,
+ "grad_norm": 0.8878734707832336,
+ "learning_rate": 0.00019014880118816164,
+ "loss": 1.6018,
+ "step": 427
+ },
+ {
+ "epoch": 0.4281204088649933,
+ "grad_norm": 0.726259708404541,
+ "learning_rate": 0.0001901033831629532,
+ "loss": 1.7732,
+ "step": 428
+ },
+ {
+ "epoch": 0.4291206901941171,
+ "grad_norm": 0.7620319724082947,
+ "learning_rate": 0.00019005786613042185,
+ "loss": 1.6466,
+ "step": 429
+ },
+ {
+ "epoch": 0.4301209715232409,
+ "grad_norm": 0.7295501828193665,
+ "learning_rate": 0.00019001225014058255,
+ "loss": 1.8708,
+ "step": 430
+ },
+ {
+ "epoch": 0.43112125285236474,
+ "grad_norm": 0.7419458031654358,
+ "learning_rate": 0.00018996653524355902,
+ "loss": 1.6583,
+ "step": 431
+ },
+ {
+ "epoch": 0.4321215341814885,
+ "grad_norm": 0.7701705098152161,
+ "learning_rate": 0.00018992072148958368,
+ "loss": 1.4421,
+ "step": 432
+ },
+ {
+ "epoch": 0.43312181551061235,
+ "grad_norm": 0.8237659931182861,
+ "learning_rate": 0.00018987480892899758,
+ "loss": 1.844,
+ "step": 433
+ },
+ {
+ "epoch": 0.4341220968397362,
+ "grad_norm": 0.6167672276496887,
+ "learning_rate": 0.00018982879761225027,
+ "loss": 1.6193,
+ "step": 434
+ },
+ {
+ "epoch": 0.43512237816886,
+ "grad_norm": 0.7565534710884094,
+ "learning_rate": 0.00018978268758989991,
+ "loss": 1.7655,
+ "step": 435
+ },
+ {
+ "epoch": 0.4361226594979838,
+ "grad_norm": 0.8333333730697632,
+ "learning_rate": 0.00018973647891261307,
+ "loss": 1.5764,
+ "step": 436
+ },
+ {
+ "epoch": 0.4371229408271076,
+ "grad_norm": 0.7404434084892273,
+ "learning_rate": 0.00018969017163116472,
+ "loss": 1.7922,
+ "step": 437
+ },
+ {
+ "epoch": 0.43812322215623145,
+ "grad_norm": 0.7129400372505188,
+ "learning_rate": 0.0001896437657964382,
+ "loss": 1.6925,
+ "step": 438
+ },
+ {
+ "epoch": 0.4391235034853553,
+ "grad_norm": 0.7750307321548462,
+ "learning_rate": 0.00018959726145942508,
+ "loss": 1.8133,
+ "step": 439
+ },
+ {
+ "epoch": 0.44012378481447906,
+ "grad_norm": 0.7244920134544373,
+ "learning_rate": 0.00018955065867122528,
+ "loss": 1.6425,
+ "step": 440
+ },
+ {
+ "epoch": 0.4411240661436029,
+ "grad_norm": 0.7624644637107849,
+ "learning_rate": 0.00018950395748304678,
+ "loss": 1.886,
+ "step": 441
+ },
+ {
+ "epoch": 0.4421243474727267,
+ "grad_norm": 0.7016286849975586,
+ "learning_rate": 0.0001894571579462058,
+ "loss": 1.7308,
+ "step": 442
+ },
+ {
+ "epoch": 0.4431246288018505,
+ "grad_norm": 0.6965353488922119,
+ "learning_rate": 0.00018941026011212654,
+ "loss": 1.5872,
+ "step": 443
+ },
+ {
+ "epoch": 0.4441249101309743,
+ "grad_norm": 0.7479360699653625,
+ "learning_rate": 0.00018936326403234125,
+ "loss": 1.8129,
+ "step": 444
+ },
+ {
+ "epoch": 0.44512519146009816,
+ "grad_norm": 0.7027686834335327,
+ "learning_rate": 0.00018931616975849006,
+ "loss": 1.6433,
+ "step": 445
+ },
+ {
+ "epoch": 0.446125472789222,
+ "grad_norm": 0.7771592140197754,
+ "learning_rate": 0.00018926897734232115,
+ "loss": 1.4645,
+ "step": 446
+ },
+ {
+ "epoch": 0.44712575411834576,
+ "grad_norm": 0.7766458988189697,
+ "learning_rate": 0.0001892216868356904,
+ "loss": 1.7873,
+ "step": 447
+ },
+ {
+ "epoch": 0.4481260354474696,
+ "grad_norm": 0.8146182298660278,
+ "learning_rate": 0.0001891742982905615,
+ "loss": 1.7935,
+ "step": 448
+ },
+ {
+ "epoch": 0.4491263167765934,
+ "grad_norm": 0.6744781136512756,
+ "learning_rate": 0.00018912681175900598,
+ "loss": 1.8916,
+ "step": 449
+ },
+ {
+ "epoch": 0.45012659810571726,
+ "grad_norm": 0.6259024143218994,
+ "learning_rate": 0.00018907922729320285,
+ "loss": 1.6616,
+ "step": 450
+ },
+ {
+ "epoch": 0.45112687943484103,
+ "grad_norm": 0.7717494368553162,
+ "learning_rate": 0.00018903154494543889,
+ "loss": 1.817,
+ "step": 451
+ },
+ {
+ "epoch": 0.45212716076396486,
+ "grad_norm": 0.648040771484375,
+ "learning_rate": 0.00018898376476810834,
+ "loss": 1.6309,
+ "step": 452
+ },
+ {
+ "epoch": 0.4531274420930887,
+ "grad_norm": 0.7560017704963684,
+ "learning_rate": 0.00018893588681371303,
+ "loss": 1.8016,
+ "step": 453
+ },
+ {
+ "epoch": 0.45412772342221247,
+ "grad_norm": 0.8778628706932068,
+ "learning_rate": 0.00018888791113486213,
+ "loss": 1.7797,
+ "step": 454
+ },
+ {
+ "epoch": 0.4551280047513363,
+ "grad_norm": 0.6795655488967896,
+ "learning_rate": 0.00018883983778427227,
+ "loss": 1.6343,
+ "step": 455
+ },
+ {
+ "epoch": 0.45612828608046013,
+ "grad_norm": 0.6690213084220886,
+ "learning_rate": 0.0001887916668147673,
+ "loss": 1.6224,
+ "step": 456
+ },
+ {
+ "epoch": 0.45712856740958396,
+ "grad_norm": 0.7529327869415283,
+ "learning_rate": 0.00018874339827927846,
+ "loss": 1.6396,
+ "step": 457
+ },
+ {
+ "epoch": 0.45812884873870774,
+ "grad_norm": 0.8393098711967468,
+ "learning_rate": 0.00018869503223084414,
+ "loss": 1.8374,
+ "step": 458
+ },
+ {
+ "epoch": 0.45912913006783157,
+ "grad_norm": 0.7435776591300964,
+ "learning_rate": 0.00018864656872260985,
+ "loss": 1.7363,
+ "step": 459
+ },
+ {
+ "epoch": 0.4601294113969554,
+ "grad_norm": 0.6737055778503418,
+ "learning_rate": 0.00018859800780782828,
+ "loss": 1.6661,
+ "step": 460
+ },
+ {
+ "epoch": 0.46112969272607923,
+ "grad_norm": 0.7674340605735779,
+ "learning_rate": 0.000188549349539859,
+ "loss": 1.6269,
+ "step": 461
+ },
+ {
+ "epoch": 0.462129974055203,
+ "grad_norm": 0.7329950928688049,
+ "learning_rate": 0.00018850059397216876,
+ "loss": 1.6989,
+ "step": 462
+ },
+ {
+ "epoch": 0.46313025538432684,
+ "grad_norm": 0.7075778841972351,
+ "learning_rate": 0.00018845174115833099,
+ "loss": 1.7286,
+ "step": 463
+ },
+ {
+ "epoch": 0.46413053671345067,
+ "grad_norm": 0.7973611354827881,
+ "learning_rate": 0.0001884027911520262,
+ "loss": 1.7478,
+ "step": 464
+ },
+ {
+ "epoch": 0.4651308180425745,
+ "grad_norm": 0.7790057063102722,
+ "learning_rate": 0.00018835374400704154,
+ "loss": 1.6659,
+ "step": 465
+ },
+ {
+ "epoch": 0.4661310993716983,
+ "grad_norm": 0.8505310416221619,
+ "learning_rate": 0.00018830459977727096,
+ "loss": 1.6879,
+ "step": 466
+ },
+ {
+ "epoch": 0.4671313807008221,
+ "grad_norm": 0.7616267800331116,
+ "learning_rate": 0.0001882553585167151,
+ "loss": 1.6525,
+ "step": 467
+ },
+ {
+ "epoch": 0.46813166202994594,
+ "grad_norm": 0.8038133978843689,
+ "learning_rate": 0.00018820602027948114,
+ "loss": 1.7929,
+ "step": 468
+ },
+ {
+ "epoch": 0.4691319433590697,
+ "grad_norm": 0.6762365698814392,
+ "learning_rate": 0.00018815658511978298,
+ "loss": 1.6926,
+ "step": 469
+ },
+ {
+ "epoch": 0.47013222468819355,
+ "grad_norm": 0.6515015959739685,
+ "learning_rate": 0.00018810705309194083,
+ "loss": 1.7817,
+ "step": 470
+ },
+ {
+ "epoch": 0.4711325060173174,
+ "grad_norm": 0.696675181388855,
+ "learning_rate": 0.00018805742425038145,
+ "loss": 1.7195,
+ "step": 471
+ },
+ {
+ "epoch": 0.4721327873464412,
+ "grad_norm": 0.7929533123970032,
+ "learning_rate": 0.00018800769864963802,
+ "loss": 2.0165,
+ "step": 472
+ },
+ {
+ "epoch": 0.473133068675565,
+ "grad_norm": 0.7223886251449585,
+ "learning_rate": 0.00018795787634434994,
+ "loss": 1.6708,
+ "step": 473
+ },
+ {
+ "epoch": 0.4741333500046888,
+ "grad_norm": 0.7982028126716614,
+ "learning_rate": 0.0001879079573892629,
+ "loss": 1.628,
+ "step": 474
+ },
+ {
+ "epoch": 0.47513363133381264,
+ "grad_norm": 0.6962152123451233,
+ "learning_rate": 0.00018785794183922883,
+ "loss": 1.6429,
+ "step": 475
+ },
+ {
+ "epoch": 0.4761339126629365,
+ "grad_norm": 0.687489926815033,
+ "learning_rate": 0.00018780782974920572,
+ "loss": 1.4546,
+ "step": 476
+ },
+ {
+ "epoch": 0.47713419399206025,
+ "grad_norm": 0.7260375022888184,
+ "learning_rate": 0.00018775762117425777,
+ "loss": 1.739,
+ "step": 477
+ },
+ {
+ "epoch": 0.4781344753211841,
+ "grad_norm": 0.759400486946106,
+ "learning_rate": 0.0001877073161695551,
+ "loss": 1.6465,
+ "step": 478
+ },
+ {
+ "epoch": 0.4791347566503079,
+ "grad_norm": 0.7412364482879639,
+ "learning_rate": 0.00018765691479037376,
+ "loss": 1.7333,
+ "step": 479
+ },
+ {
+ "epoch": 0.4801350379794317,
+ "grad_norm": 0.6909674406051636,
+ "learning_rate": 0.00018760641709209583,
+ "loss": 1.6936,
+ "step": 480
+ },
+ {
+ "epoch": 0.4811353193085555,
+ "grad_norm": 0.6478050947189331,
+ "learning_rate": 0.0001875558231302091,
+ "loss": 1.4435,
+ "step": 481
+ },
+ {
+ "epoch": 0.48213560063767935,
+ "grad_norm": 0.6662638187408447,
+ "learning_rate": 0.00018750513296030718,
+ "loss": 1.5567,
+ "step": 482
+ },
+ {
+ "epoch": 0.4831358819668032,
+ "grad_norm": 0.6973413825035095,
+ "learning_rate": 0.00018745434663808942,
+ "loss": 1.6434,
+ "step": 483
+ },
+ {
+ "epoch": 0.48413616329592696,
+ "grad_norm": 0.8381956815719604,
+ "learning_rate": 0.0001874034642193608,
+ "loss": 1.8568,
+ "step": 484
+ },
+ {
+ "epoch": 0.4851364446250508,
+ "grad_norm": 0.6522262096405029,
+ "learning_rate": 0.0001873524857600319,
+ "loss": 1.4265,
+ "step": 485
+ },
+ {
+ "epoch": 0.4861367259541746,
+ "grad_norm": 0.7018440961837769,
+ "learning_rate": 0.00018730141131611882,
+ "loss": 1.6914,
+ "step": 486
+ },
+ {
+ "epoch": 0.48713700728329845,
+ "grad_norm": 0.8237236142158508,
+ "learning_rate": 0.00018725024094374315,
+ "loss": 1.4462,
+ "step": 487
+ },
+ {
+ "epoch": 0.4881372886124222,
+ "grad_norm": 0.6507928967475891,
+ "learning_rate": 0.00018719897469913184,
+ "loss": 1.5802,
+ "step": 488
+ },
+ {
+ "epoch": 0.48913756994154606,
+ "grad_norm": 0.8120100498199463,
+ "learning_rate": 0.00018714761263861728,
+ "loss": 1.7819,
+ "step": 489
+ },
+ {
+ "epoch": 0.4901378512706699,
+ "grad_norm": 0.759722888469696,
+ "learning_rate": 0.000187096154818637,
+ "loss": 1.7481,
+ "step": 490
+ },
+ {
+ "epoch": 0.4911381325997937,
+ "grad_norm": 0.7146822214126587,
+ "learning_rate": 0.00018704460129573391,
+ "loss": 1.6217,
+ "step": 491
+ },
+ {
+ "epoch": 0.4921384139289175,
+ "grad_norm": 0.7138429284095764,
+ "learning_rate": 0.00018699295212655596,
+ "loss": 1.7242,
+ "step": 492
+ },
+ {
+ "epoch": 0.4931386952580413,
+ "grad_norm": 0.8145613670349121,
+ "learning_rate": 0.00018694120736785632,
+ "loss": 1.8759,
+ "step": 493
+ },
+ {
+ "epoch": 0.49413897658716516,
+ "grad_norm": 0.6624435186386108,
+ "learning_rate": 0.00018688936707649304,
+ "loss": 1.8632,
+ "step": 494
+ },
+ {
+ "epoch": 0.49513925791628893,
+ "grad_norm": 0.6550843119621277,
+ "learning_rate": 0.00018683743130942928,
+ "loss": 1.7645,
+ "step": 495
+ },
+ {
+ "epoch": 0.49613953924541276,
+ "grad_norm": 0.6931488513946533,
+ "learning_rate": 0.00018678540012373302,
+ "loss": 1.5899,
+ "step": 496
+ },
+ {
+ "epoch": 0.4971398205745366,
+ "grad_norm": 0.684229850769043,
+ "learning_rate": 0.00018673327357657715,
+ "loss": 1.6902,
+ "step": 497
+ },
+ {
+ "epoch": 0.4981401019036604,
+ "grad_norm": 0.7380666136741638,
+ "learning_rate": 0.0001866810517252393,
+ "loss": 1.7115,
+ "step": 498
+ },
+ {
+ "epoch": 0.4991403832327842,
+ "grad_norm": 0.6782827973365784,
+ "learning_rate": 0.00018662873462710184,
+ "loss": 1.495,
+ "step": 499
+ },
+ {
+ "epoch": 0.5001406645619081,
+ "grad_norm": 0.730248749256134,
+ "learning_rate": 0.0001865763223396518,
+ "loss": 1.5147,
+ "step": 500
+ },
+ {
+ "epoch": 0.5011409458910319,
+ "grad_norm": 0.7644149661064148,
+ "learning_rate": 0.00018652381492048083,
+ "loss": 1.7278,
+ "step": 501
+ },
+ {
+ "epoch": 0.5021412272201556,
+ "grad_norm": 0.6977668404579163,
+ "learning_rate": 0.00018647121242728506,
+ "loss": 1.5194,
+ "step": 502
+ },
+ {
+ "epoch": 0.5031415085492795,
+ "grad_norm": 0.7714502215385437,
+ "learning_rate": 0.00018641851491786512,
+ "loss": 2.0039,
+ "step": 503
+ },
+ {
+ "epoch": 0.5041417898784033,
+ "grad_norm": 0.9013757109642029,
+ "learning_rate": 0.00018636572245012606,
+ "loss": 1.8069,
+ "step": 504
+ },
+ {
+ "epoch": 0.5051420712075271,
+ "grad_norm": 0.7173192501068115,
+ "learning_rate": 0.00018631283508207725,
+ "loss": 1.841,
+ "step": 505
+ },
+ {
+ "epoch": 0.506142352536651,
+ "grad_norm": 0.7639481425285339,
+ "learning_rate": 0.00018625985287183233,
+ "loss": 1.5333,
+ "step": 506
+ },
+ {
+ "epoch": 0.5071426338657747,
+ "grad_norm": 0.8779808282852173,
+ "learning_rate": 0.00018620677587760916,
+ "loss": 1.8304,
+ "step": 507
+ },
+ {
+ "epoch": 0.5081429151948985,
+ "grad_norm": 0.8956230282783508,
+ "learning_rate": 0.00018615360415772978,
+ "loss": 1.7228,
+ "step": 508
+ },
+ {
+ "epoch": 0.5091431965240224,
+ "grad_norm": 0.8217945098876953,
+ "learning_rate": 0.00018610033777062025,
+ "loss": 1.4977,
+ "step": 509
+ },
+ {
+ "epoch": 0.5101434778531462,
+ "grad_norm": 0.7628902196884155,
+ "learning_rate": 0.0001860469767748108,
+ "loss": 1.7634,
+ "step": 510
+ },
+ {
+ "epoch": 0.5111437591822701,
+ "grad_norm": 0.635911226272583,
+ "learning_rate": 0.00018599352122893539,
+ "loss": 1.5103,
+ "step": 511
+ },
+ {
+ "epoch": 0.5121440405113938,
+ "grad_norm": 0.6439646482467651,
+ "learning_rate": 0.00018593997119173205,
+ "loss": 1.7281,
+ "step": 512
+ },
+ {
+ "epoch": 0.5131443218405176,
+ "grad_norm": 0.7753567099571228,
+ "learning_rate": 0.00018588632672204264,
+ "loss": 1.9028,
+ "step": 513
+ },
+ {
+ "epoch": 0.5141446031696415,
+ "grad_norm": 0.8296717405319214,
+ "learning_rate": 0.0001858325878788126,
+ "loss": 1.9049,
+ "step": 514
+ },
+ {
+ "epoch": 0.5151448844987653,
+ "grad_norm": 0.7379167079925537,
+ "learning_rate": 0.00018577875472109134,
+ "loss": 1.6262,
+ "step": 515
+ },
+ {
+ "epoch": 0.516145165827889,
+ "grad_norm": 0.634040355682373,
+ "learning_rate": 0.0001857248273080317,
+ "loss": 1.5416,
+ "step": 516
+ },
+ {
+ "epoch": 0.5171454471570129,
+ "grad_norm": 0.7394217252731323,
+ "learning_rate": 0.00018567080569889015,
+ "loss": 1.6035,
+ "step": 517
+ },
+ {
+ "epoch": 0.5181457284861367,
+ "grad_norm": 0.705426037311554,
+ "learning_rate": 0.00018561668995302667,
+ "loss": 1.616,
+ "step": 518
+ },
+ {
+ "epoch": 0.5191460098152605,
+ "grad_norm": 0.778021514415741,
+ "learning_rate": 0.00018556248012990468,
+ "loss": 1.5206,
+ "step": 519
+ },
+ {
+ "epoch": 0.5201462911443844,
+ "grad_norm": 0.7203211188316345,
+ "learning_rate": 0.000185508176289091,
+ "loss": 1.5369,
+ "step": 520
+ },
+ {
+ "epoch": 0.5211465724735082,
+ "grad_norm": 0.7390999794006348,
+ "learning_rate": 0.00018545377849025566,
+ "loss": 1.6438,
+ "step": 521
+ },
+ {
+ "epoch": 0.522146853802632,
+ "grad_norm": 0.6779179573059082,
+ "learning_rate": 0.0001853992867931721,
+ "loss": 1.6268,
+ "step": 522
+ },
+ {
+ "epoch": 0.5231471351317558,
+ "grad_norm": 0.6589105725288391,
+ "learning_rate": 0.00018534470125771674,
+ "loss": 1.8353,
+ "step": 523
+ },
+ {
+ "epoch": 0.5241474164608796,
+ "grad_norm": 0.692081868648529,
+ "learning_rate": 0.0001852900219438693,
+ "loss": 1.7047,
+ "step": 524
+ },
+ {
+ "epoch": 0.5251476977900035,
+ "grad_norm": 0.6639224886894226,
+ "learning_rate": 0.0001852352489117124,
+ "loss": 1.7448,
+ "step": 525
+ },
+ {
+ "epoch": 0.5261479791191273,
+ "grad_norm": 0.7168188095092773,
+ "learning_rate": 0.00018518038222143174,
+ "loss": 1.6734,
+ "step": 526
+ },
+ {
+ "epoch": 0.527148260448251,
+ "grad_norm": 0.7671873569488525,
+ "learning_rate": 0.00018512542193331583,
+ "loss": 1.9392,
+ "step": 527
+ },
+ {
+ "epoch": 0.5281485417773749,
+ "grad_norm": 0.7861583828926086,
+ "learning_rate": 0.00018507036810775615,
+ "loss": 1.5749,
+ "step": 528
+ },
+ {
+ "epoch": 0.5291488231064987,
+ "grad_norm": 0.6727952361106873,
+ "learning_rate": 0.00018501522080524688,
+ "loss": 1.7584,
+ "step": 529
+ },
+ {
+ "epoch": 0.5301491044356225,
+ "grad_norm": 0.7287748456001282,
+ "learning_rate": 0.0001849599800863849,
+ "loss": 1.783,
+ "step": 530
+ },
+ {
+ "epoch": 0.5311493857647464,
+ "grad_norm": 0.6883361339569092,
+ "learning_rate": 0.0001849046460118698,
+ "loss": 1.6104,
+ "step": 531
+ },
+ {
+ "epoch": 0.5321496670938701,
+ "grad_norm": 0.6767789125442505,
+ "learning_rate": 0.0001848492186425037,
+ "loss": 1.8416,
+ "step": 532
+ },
+ {
+ "epoch": 0.533149948422994,
+ "grad_norm": 0.7468088269233704,
+ "learning_rate": 0.0001847936980391913,
+ "loss": 1.8297,
+ "step": 533
+ },
+ {
+ "epoch": 0.5341502297521178,
+ "grad_norm": 0.7560007572174072,
+ "learning_rate": 0.00018473808426293964,
+ "loss": 1.5405,
+ "step": 534
+ },
+ {
+ "epoch": 0.5351505110812416,
+ "grad_norm": 0.6900463104248047,
+ "learning_rate": 0.00018468237737485823,
+ "loss": 1.5117,
+ "step": 535
+ },
+ {
+ "epoch": 0.5361507924103655,
+ "grad_norm": 0.8691229820251465,
+ "learning_rate": 0.00018462657743615888,
+ "loss": 1.724,
+ "step": 536
+ },
+ {
+ "epoch": 0.5371510737394892,
+ "grad_norm": 0.7081372141838074,
+ "learning_rate": 0.00018457068450815562,
+ "loss": 1.6418,
+ "step": 537
+ },
+ {
+ "epoch": 0.538151355068613,
+ "grad_norm": 0.8148525953292847,
+ "learning_rate": 0.00018451469865226464,
+ "loss": 1.8564,
+ "step": 538
+ },
+ {
+ "epoch": 0.5391516363977369,
+ "grad_norm": 0.7306076288223267,
+ "learning_rate": 0.00018445861993000436,
+ "loss": 1.4974,
+ "step": 539
+ },
+ {
+ "epoch": 0.5401519177268607,
+ "grad_norm": 0.815825343132019,
+ "learning_rate": 0.00018440244840299506,
+ "loss": 1.8965,
+ "step": 540
+ },
+ {
+ "epoch": 0.5411521990559846,
+ "grad_norm": 0.753034234046936,
+ "learning_rate": 0.0001843461841329591,
+ "loss": 2.016,
+ "step": 541
+ },
+ {
+ "epoch": 0.5421524803851083,
+ "grad_norm": 0.8658022284507751,
+ "learning_rate": 0.0001842898271817208,
+ "loss": 1.6697,
+ "step": 542
+ },
+ {
+ "epoch": 0.5431527617142321,
+ "grad_norm": 0.7143135666847229,
+ "learning_rate": 0.00018423337761120618,
+ "loss": 1.8741,
+ "step": 543
+ },
+ {
+ "epoch": 0.544153043043356,
+ "grad_norm": 0.6843370795249939,
+ "learning_rate": 0.00018417683548344318,
+ "loss": 1.763,
+ "step": 544
+ },
+ {
+ "epoch": 0.5451533243724798,
+ "grad_norm": 0.6699584126472473,
+ "learning_rate": 0.00018412020086056133,
+ "loss": 1.7126,
+ "step": 545
+ },
+ {
+ "epoch": 0.5461536057016035,
+ "grad_norm": 0.6921600699424744,
+ "learning_rate": 0.0001840634738047918,
+ "loss": 1.6697,
+ "step": 546
+ },
+ {
+ "epoch": 0.5471538870307274,
+ "grad_norm": 0.822501003742218,
+ "learning_rate": 0.0001840066543784675,
+ "loss": 1.7045,
+ "step": 547
+ },
+ {
+ "epoch": 0.5481541683598512,
+ "grad_norm": 0.7563886046409607,
+ "learning_rate": 0.00018394974264402257,
+ "loss": 1.6853,
+ "step": 548
+ },
+ {
+ "epoch": 0.549154449688975,
+ "grad_norm": 0.7408218383789062,
+ "learning_rate": 0.00018389273866399275,
+ "loss": 1.6496,
+ "step": 549
+ },
+ {
+ "epoch": 0.5501547310180989,
+ "grad_norm": 0.6454717516899109,
+ "learning_rate": 0.00018383564250101512,
+ "loss": 1.5063,
+ "step": 550
+ },
+ {
+ "epoch": 0.5511550123472226,
+ "grad_norm": 0.7033074498176575,
+ "learning_rate": 0.000183778454217828,
+ "loss": 1.6432,
+ "step": 551
+ },
+ {
+ "epoch": 0.5521552936763465,
+ "grad_norm": 0.768194854259491,
+ "learning_rate": 0.0001837211738772711,
+ "loss": 2.0594,
+ "step": 552
+ },
+ {
+ "epoch": 0.5531555750054703,
+ "grad_norm": 0.7805166244506836,
+ "learning_rate": 0.000183663801542285,
+ "loss": 1.4317,
+ "step": 553
+ },
+ {
+ "epoch": 0.5541558563345941,
+ "grad_norm": 0.6603556871414185,
+ "learning_rate": 0.00018360633727591155,
+ "loss": 1.4171,
+ "step": 554
+ },
+ {
+ "epoch": 0.555156137663718,
+ "grad_norm": 0.6996607780456543,
+ "learning_rate": 0.00018354878114129367,
+ "loss": 1.6832,
+ "step": 555
+ },
+ {
+ "epoch": 0.5561564189928417,
+ "grad_norm": 0.7861623167991638,
+ "learning_rate": 0.00018349113320167504,
+ "loss": 1.8425,
+ "step": 556
+ },
+ {
+ "epoch": 0.5571567003219655,
+ "grad_norm": 0.8387210369110107,
+ "learning_rate": 0.00018343339352040042,
+ "loss": 2.1272,
+ "step": 557
+ },
+ {
+ "epoch": 0.5581569816510894,
+ "grad_norm": 0.829555094242096,
+ "learning_rate": 0.00018337556216091517,
+ "loss": 1.4835,
+ "step": 558
+ },
+ {
+ "epoch": 0.5591572629802132,
+ "grad_norm": 0.7824863791465759,
+ "learning_rate": 0.00018331763918676556,
+ "loss": 1.8893,
+ "step": 559
+ },
+ {
+ "epoch": 0.560157544309337,
+ "grad_norm": 0.707683801651001,
+ "learning_rate": 0.00018325962466159848,
+ "loss": 1.6492,
+ "step": 560
+ },
+ {
+ "epoch": 0.5611578256384608,
+ "grad_norm": 0.775600254535675,
+ "learning_rate": 0.00018320151864916135,
+ "loss": 1.6542,
+ "step": 561
+ },
+ {
+ "epoch": 0.5621581069675846,
+ "grad_norm": 0.7602002024650574,
+ "learning_rate": 0.00018314332121330225,
+ "loss": 1.9625,
+ "step": 562
+ },
+ {
+ "epoch": 0.5631583882967085,
+ "grad_norm": 0.7535431385040283,
+ "learning_rate": 0.0001830850324179695,
+ "loss": 1.6407,
+ "step": 563
+ },
+ {
+ "epoch": 0.5641586696258323,
+ "grad_norm": 1.1884644031524658,
+ "learning_rate": 0.00018302665232721208,
+ "loss": 1.6188,
+ "step": 564
+ },
+ {
+ "epoch": 0.565158950954956,
+ "grad_norm": 0.7576595544815063,
+ "learning_rate": 0.0001829681810051791,
+ "loss": 1.7739,
+ "step": 565
+ },
+ {
+ "epoch": 0.5661592322840799,
+ "grad_norm": 0.6807442307472229,
+ "learning_rate": 0.00018290961851611995,
+ "loss": 1.6244,
+ "step": 566
+ },
+ {
+ "epoch": 0.5671595136132037,
+ "grad_norm": 0.7222456932067871,
+ "learning_rate": 0.00018285096492438424,
+ "loss": 1.7809,
+ "step": 567
+ },
+ {
+ "epoch": 0.5681597949423275,
+ "grad_norm": 0.7002213597297668,
+ "learning_rate": 0.00018279222029442163,
+ "loss": 1.5462,
+ "step": 568
+ },
+ {
+ "epoch": 0.5691600762714514,
+ "grad_norm": 0.8433569669723511,
+ "learning_rate": 0.00018273338469078186,
+ "loss": 1.5042,
+ "step": 569
+ },
+ {
+ "epoch": 0.5701603576005752,
+ "grad_norm": 0.663144588470459,
+ "learning_rate": 0.00018267445817811466,
+ "loss": 1.7133,
+ "step": 570
+ },
+ {
+ "epoch": 0.5711606389296989,
+ "grad_norm": 0.7298465967178345,
+ "learning_rate": 0.00018261544082116954,
+ "loss": 2.0201,
+ "step": 571
+ },
+ {
+ "epoch": 0.5721609202588228,
+ "grad_norm": 0.7613754868507385,
+ "learning_rate": 0.00018255633268479595,
+ "loss": 1.8065,
+ "step": 572
+ },
+ {
+ "epoch": 0.5731612015879466,
+ "grad_norm": 0.7252177596092224,
+ "learning_rate": 0.00018249713383394303,
+ "loss": 1.5715,
+ "step": 573
+ },
+ {
+ "epoch": 0.5741614829170705,
+ "grad_norm": 0.783961832523346,
+ "learning_rate": 0.0001824378443336596,
+ "loss": 1.7102,
+ "step": 574
+ },
+ {
+ "epoch": 0.5751617642461943,
+ "grad_norm": 0.8532115817070007,
+ "learning_rate": 0.00018237846424909413,
+ "loss": 1.7011,
+ "step": 575
+ },
+ {
+ "epoch": 0.576162045575318,
+ "grad_norm": 0.7841559052467346,
+ "learning_rate": 0.00018231899364549455,
+ "loss": 1.6397,
+ "step": 576
+ },
+ {
+ "epoch": 0.5771623269044419,
+ "grad_norm": 0.7118051648139954,
+ "learning_rate": 0.00018225943258820833,
+ "loss": 1.7166,
+ "step": 577
+ },
+ {
+ "epoch": 0.5781626082335657,
+ "grad_norm": 0.7298933863639832,
+ "learning_rate": 0.00018219978114268227,
+ "loss": 1.604,
+ "step": 578
+ },
+ {
+ "epoch": 0.5791628895626895,
+ "grad_norm": 0.6644678711891174,
+ "learning_rate": 0.00018214003937446253,
+ "loss": 1.7673,
+ "step": 579
+ },
+ {
+ "epoch": 0.5801631708918134,
+ "grad_norm": 0.6707085371017456,
+ "learning_rate": 0.00018208020734919455,
+ "loss": 1.662,
+ "step": 580
+ },
+ {
+ "epoch": 0.5811634522209371,
+ "grad_norm": 0.7431498765945435,
+ "learning_rate": 0.00018202028513262288,
+ "loss": 1.7757,
+ "step": 581
+ },
+ {
+ "epoch": 0.5821637335500609,
+ "grad_norm": 0.6936060190200806,
+ "learning_rate": 0.00018196027279059117,
+ "loss": 1.8464,
+ "step": 582
+ },
+ {
+ "epoch": 0.5831640148791848,
+ "grad_norm": 0.6512508988380432,
+ "learning_rate": 0.00018190017038904215,
+ "loss": 1.5537,
+ "step": 583
+ },
+ {
+ "epoch": 0.5841642962083086,
+ "grad_norm": 0.7541425228118896,
+ "learning_rate": 0.0001818399779940175,
+ "loss": 1.8552,
+ "step": 584
+ },
+ {
+ "epoch": 0.5851645775374325,
+ "grad_norm": 0.720447301864624,
+ "learning_rate": 0.0001817796956716578,
+ "loss": 1.5392,
+ "step": 585
+ },
+ {
+ "epoch": 0.5861648588665562,
+ "grad_norm": 0.7526831030845642,
+ "learning_rate": 0.00018171932348820234,
+ "loss": 1.8224,
+ "step": 586
+ },
+ {
+ "epoch": 0.58716514019568,
+ "grad_norm": 0.6906039714813232,
+ "learning_rate": 0.0001816588615099893,
+ "loss": 1.6498,
+ "step": 587
+ },
+ {
+ "epoch": 0.5881654215248039,
+ "grad_norm": 0.726737380027771,
+ "learning_rate": 0.00018159830980345548,
+ "loss": 1.5377,
+ "step": 588
+ },
+ {
+ "epoch": 0.5891657028539277,
+ "grad_norm": 0.6792006492614746,
+ "learning_rate": 0.0001815376684351362,
+ "loss": 1.8905,
+ "step": 589
+ },
+ {
+ "epoch": 0.5901659841830514,
+ "grad_norm": 0.7885284423828125,
+ "learning_rate": 0.00018147693747166534,
+ "loss": 1.709,
+ "step": 590
+ },
+ {
+ "epoch": 0.5911662655121753,
+ "grad_norm": 0.7270367741584778,
+ "learning_rate": 0.00018141611697977529,
+ "loss": 1.7973,
+ "step": 591
+ },
+ {
+ "epoch": 0.5921665468412991,
+ "grad_norm": 0.7852438688278198,
+ "learning_rate": 0.00018135520702629675,
+ "loss": 1.6312,
+ "step": 592
+ },
+ {
+ "epoch": 0.5931668281704229,
+ "grad_norm": 0.7881343960762024,
+ "learning_rate": 0.0001812942076781588,
+ "loss": 1.7581,
+ "step": 593
+ },
+ {
+ "epoch": 0.5941671094995468,
+ "grad_norm": 0.7581279277801514,
+ "learning_rate": 0.0001812331190023886,
+ "loss": 1.6811,
+ "step": 594
+ },
+ {
+ "epoch": 0.5951673908286705,
+ "grad_norm": 0.7250573039054871,
+ "learning_rate": 0.0001811719410661116,
+ "loss": 1.6835,
+ "step": 595
+ },
+ {
+ "epoch": 0.5961676721577944,
+ "grad_norm": 0.7383652329444885,
+ "learning_rate": 0.00018111067393655132,
+ "loss": 1.7804,
+ "step": 596
+ },
+ {
+ "epoch": 0.5971679534869182,
+ "grad_norm": 0.6631885170936584,
+ "learning_rate": 0.0001810493176810292,
+ "loss": 1.477,
+ "step": 597
+ },
+ {
+ "epoch": 0.598168234816042,
+ "grad_norm": 0.6705698370933533,
+ "learning_rate": 0.00018098787236696474,
+ "loss": 1.5939,
+ "step": 598
+ },
+ {
+ "epoch": 0.5991685161451659,
+ "grad_norm": 0.6646862626075745,
+ "learning_rate": 0.00018092633806187513,
+ "loss": 1.5903,
+ "step": 599
+ },
+ {
+ "epoch": 0.6001687974742896,
+ "grad_norm": 0.7267604470252991,
+ "learning_rate": 0.0001808647148333755,
+ "loss": 1.6864,
+ "step": 600
+ },
+ {
+ "epoch": 0.6011690788034134,
+ "grad_norm": 0.672102689743042,
+ "learning_rate": 0.00018080300274917862,
+ "loss": 1.73,
+ "step": 601
+ },
+ {
+ "epoch": 0.6021693601325373,
+ "grad_norm": 0.7541230320930481,
+ "learning_rate": 0.00018074120187709495,
+ "loss": 1.7824,
+ "step": 602
+ },
+ {
+ "epoch": 0.6031696414616611,
+ "grad_norm": 0.6589316129684448,
+ "learning_rate": 0.00018067931228503246,
+ "loss": 1.7007,
+ "step": 603
+ },
+ {
+ "epoch": 0.604169922790785,
+ "grad_norm": 0.7083007097244263,
+ "learning_rate": 0.00018061733404099655,
+ "loss": 1.7703,
+ "step": 604
+ },
+ {
+ "epoch": 0.6051702041199087,
+ "grad_norm": 0.6700689196586609,
+ "learning_rate": 0.00018055526721309016,
+ "loss": 1.5967,
+ "step": 605
+ },
+ {
+ "epoch": 0.6061704854490325,
+ "grad_norm": 0.6766354441642761,
+ "learning_rate": 0.0001804931118695135,
+ "loss": 1.6208,
+ "step": 606
+ },
+ {
+ "epoch": 0.6071707667781564,
+ "grad_norm": 0.8214102387428284,
+ "learning_rate": 0.00018043086807856403,
+ "loss": 1.6084,
+ "step": 607
+ },
+ {
+ "epoch": 0.6081710481072802,
+ "grad_norm": 0.736492395401001,
+ "learning_rate": 0.00018036853590863648,
+ "loss": 1.7254,
+ "step": 608
+ },
+ {
+ "epoch": 0.609171329436404,
+ "grad_norm": 0.6791033148765564,
+ "learning_rate": 0.00018030611542822257,
+ "loss": 1.5837,
+ "step": 609
+ },
+ {
+ "epoch": 0.6101716107655278,
+ "grad_norm": 0.6344060301780701,
+ "learning_rate": 0.00018024360670591114,
+ "loss": 1.5407,
+ "step": 610
+ },
+ {
+ "epoch": 0.6111718920946516,
+ "grad_norm": 0.9026575088500977,
+ "learning_rate": 0.00018018100981038798,
+ "loss": 1.6748,
+ "step": 611
+ },
+ {
+ "epoch": 0.6121721734237754,
+ "grad_norm": 0.8029866814613342,
+ "learning_rate": 0.00018011832481043576,
+ "loss": 1.7547,
+ "step": 612
+ },
+ {
+ "epoch": 0.6131724547528993,
+ "grad_norm": 0.8065117597579956,
+ "learning_rate": 0.00018005555177493394,
+ "loss": 1.8051,
+ "step": 613
+ },
+ {
+ "epoch": 0.614172736082023,
+ "grad_norm": 0.7858480215072632,
+ "learning_rate": 0.00017999269077285875,
+ "loss": 1.6728,
+ "step": 614
+ },
+ {
+ "epoch": 0.6151730174111469,
+ "grad_norm": 0.6735272407531738,
+ "learning_rate": 0.00017992974187328305,
+ "loss": 1.8585,
+ "step": 615
+ },
+ {
+ "epoch": 0.6161732987402707,
+ "grad_norm": 0.7518951892852783,
+ "learning_rate": 0.00017986670514537627,
+ "loss": 1.5429,
+ "step": 616
+ },
+ {
+ "epoch": 0.6171735800693945,
+ "grad_norm": 0.6952928900718689,
+ "learning_rate": 0.00017980358065840444,
+ "loss": 1.5982,
+ "step": 617
+ },
+ {
+ "epoch": 0.6181738613985184,
+ "grad_norm": 0.8996840119361877,
+ "learning_rate": 0.0001797403684817299,
+ "loss": 1.8164,
+ "step": 618
+ },
+ {
+ "epoch": 0.6191741427276422,
+ "grad_norm": 0.7645425200462341,
+ "learning_rate": 0.00017967706868481144,
+ "loss": 1.8373,
+ "step": 619
+ },
+ {
+ "epoch": 0.6201744240567659,
+ "grad_norm": 0.8479064106941223,
+ "learning_rate": 0.00017961368133720407,
+ "loss": 1.6483,
+ "step": 620
+ },
+ {
+ "epoch": 0.6211747053858898,
+ "grad_norm": 0.7806827425956726,
+ "learning_rate": 0.000179550206508559,
+ "loss": 1.78,
+ "step": 621
+ },
+ {
+ "epoch": 0.6221749867150136,
+ "grad_norm": 0.6476775407791138,
+ "learning_rate": 0.00017948664426862364,
+ "loss": 1.7712,
+ "step": 622
+ },
+ {
+ "epoch": 0.6231752680441374,
+ "grad_norm": 0.8421279788017273,
+ "learning_rate": 0.00017942299468724134,
+ "loss": 1.7753,
+ "step": 623
+ },
+ {
+ "epoch": 0.6241755493732613,
+ "grad_norm": 0.6706071496009827,
+ "learning_rate": 0.0001793592578343515,
+ "loss": 1.4093,
+ "step": 624
+ },
+ {
+ "epoch": 0.625175830702385,
+ "grad_norm": 0.8224231004714966,
+ "learning_rate": 0.0001792954337799894,
+ "loss": 1.7343,
+ "step": 625
+ },
+ {
+ "epoch": 0.6261761120315089,
+ "grad_norm": 0.8398690819740295,
+ "learning_rate": 0.00017923152259428612,
+ "loss": 1.8017,
+ "step": 626
+ },
+ {
+ "epoch": 0.6271763933606327,
+ "grad_norm": 0.6664738059043884,
+ "learning_rate": 0.00017916752434746856,
+ "loss": 1.6023,
+ "step": 627
+ },
+ {
+ "epoch": 0.6281766746897565,
+ "grad_norm": 0.9246477484703064,
+ "learning_rate": 0.0001791034391098591,
+ "loss": 1.7862,
+ "step": 628
+ },
+ {
+ "epoch": 0.6291769560188804,
+ "grad_norm": 0.797835111618042,
+ "learning_rate": 0.00017903926695187595,
+ "loss": 1.6059,
+ "step": 629
+ },
+ {
+ "epoch": 0.6301772373480041,
+ "grad_norm": 0.613727331161499,
+ "learning_rate": 0.0001789750079440326,
+ "loss": 1.5086,
+ "step": 630
+ },
+ {
+ "epoch": 0.6311775186771279,
+ "grad_norm": 0.7127765417098999,
+ "learning_rate": 0.00017891066215693817,
+ "loss": 1.5985,
+ "step": 631
+ },
+ {
+ "epoch": 0.6321778000062518,
+ "grad_norm": 0.6923073530197144,
+ "learning_rate": 0.00017884622966129695,
+ "loss": 1.5537,
+ "step": 632
+ },
+ {
+ "epoch": 0.6331780813353756,
+ "grad_norm": 0.7015733122825623,
+ "learning_rate": 0.00017878171052790868,
+ "loss": 1.7782,
+ "step": 633
+ },
+ {
+ "epoch": 0.6341783626644993,
+ "grad_norm": 0.6932784914970398,
+ "learning_rate": 0.00017871710482766817,
+ "loss": 1.4294,
+ "step": 634
+ },
+ {
+ "epoch": 0.6351786439936232,
+ "grad_norm": 0.7054254412651062,
+ "learning_rate": 0.00017865241263156546,
+ "loss": 1.7227,
+ "step": 635
+ },
+ {
+ "epoch": 0.636178925322747,
+ "grad_norm": 0.6994242072105408,
+ "learning_rate": 0.0001785876340106855,
+ "loss": 1.5998,
+ "step": 636
+ },
+ {
+ "epoch": 0.6371792066518709,
+ "grad_norm": 0.81461501121521,
+ "learning_rate": 0.0001785227690362083,
+ "loss": 1.7212,
+ "step": 637
+ },
+ {
+ "epoch": 0.6381794879809947,
+ "grad_norm": 0.943434476852417,
+ "learning_rate": 0.00017845781777940878,
+ "loss": 1.5926,
+ "step": 638
+ },
+ {
+ "epoch": 0.6391797693101184,
+ "grad_norm": 0.8455945253372192,
+ "learning_rate": 0.00017839278031165658,
+ "loss": 1.8511,
+ "step": 639
+ },
+ {
+ "epoch": 0.6401800506392423,
+ "grad_norm": 0.9348243474960327,
+ "learning_rate": 0.00017832765670441612,
+ "loss": 1.6293,
+ "step": 640
+ },
+ {
+ "epoch": 0.6411803319683661,
+ "grad_norm": 0.746127724647522,
+ "learning_rate": 0.0001782624470292465,
+ "loss": 1.4903,
+ "step": 641
+ },
+ {
+ "epoch": 0.6421806132974899,
+ "grad_norm": 0.6215783357620239,
+ "learning_rate": 0.0001781971513578013,
+ "loss": 1.7806,
+ "step": 642
+ },
+ {
+ "epoch": 0.6431808946266138,
+ "grad_norm": 0.7447994947433472,
+ "learning_rate": 0.00017813176976182873,
+ "loss": 1.7475,
+ "step": 643
+ },
+ {
+ "epoch": 0.6441811759557375,
+ "grad_norm": 0.6916540265083313,
+ "learning_rate": 0.00017806630231317127,
+ "loss": 1.6401,
+ "step": 644
+ },
+ {
+ "epoch": 0.6451814572848613,
+ "grad_norm": 0.7208524942398071,
+ "learning_rate": 0.00017800074908376584,
+ "loss": 1.7524,
+ "step": 645
+ },
+ {
+ "epoch": 0.6461817386139852,
+ "grad_norm": 0.7548331618309021,
+ "learning_rate": 0.00017793511014564358,
+ "loss": 1.5644,
+ "step": 646
+ },
+ {
+ "epoch": 0.647182019943109,
+ "grad_norm": 0.7919667959213257,
+ "learning_rate": 0.00017786938557092983,
+ "loss": 1.6758,
+ "step": 647
+ },
+ {
+ "epoch": 0.6481823012722329,
+ "grad_norm": 0.700618326663971,
+ "learning_rate": 0.00017780357543184397,
+ "loss": 1.5213,
+ "step": 648
+ },
+ {
+ "epoch": 0.6491825826013566,
+ "grad_norm": 0.6646535992622375,
+ "learning_rate": 0.00017773767980069945,
+ "loss": 1.6487,
+ "step": 649
+ },
+ {
+ "epoch": 0.6501828639304804,
+ "grad_norm": 0.6486669182777405,
+ "learning_rate": 0.0001776716987499037,
+ "loss": 1.6556,
+ "step": 650
+ },
+ {
+ "epoch": 0.6511831452596043,
+ "grad_norm": 0.657747745513916,
+ "learning_rate": 0.0001776056323519579,
+ "loss": 1.5943,
+ "step": 651
+ },
+ {
+ "epoch": 0.6521834265887281,
+ "grad_norm": 0.7777379751205444,
+ "learning_rate": 0.00017753948067945712,
+ "loss": 1.6069,
+ "step": 652
+ },
+ {
+ "epoch": 0.6531837079178519,
+ "grad_norm": 0.772153913974762,
+ "learning_rate": 0.00017747324380509006,
+ "loss": 1.7065,
+ "step": 653
+ },
+ {
+ "epoch": 0.6541839892469757,
+ "grad_norm": 0.6984367966651917,
+ "learning_rate": 0.00017740692180163908,
+ "loss": 1.7122,
+ "step": 654
+ },
+ {
+ "epoch": 0.6551842705760995,
+ "grad_norm": 0.8033855557441711,
+ "learning_rate": 0.00017734051474198003,
+ "loss": 1.6095,
+ "step": 655
+ },
+ {
+ "epoch": 0.6561845519052233,
+ "grad_norm": 0.7568691372871399,
+ "learning_rate": 0.0001772740226990823,
+ "loss": 1.6783,
+ "step": 656
+ },
+ {
+ "epoch": 0.6571848332343472,
+ "grad_norm": 0.7288162708282471,
+ "learning_rate": 0.00017720744574600863,
+ "loss": 1.695,
+ "step": 657
+ },
+ {
+ "epoch": 0.658185114563471,
+ "grad_norm": 0.6898120045661926,
+ "learning_rate": 0.00017714078395591502,
+ "loss": 1.6539,
+ "step": 658
+ },
+ {
+ "epoch": 0.6591853958925948,
+ "grad_norm": 0.6977367997169495,
+ "learning_rate": 0.00017707403740205071,
+ "loss": 1.4558,
+ "step": 659
+ },
+ {
+ "epoch": 0.6601856772217186,
+ "grad_norm": 0.6594682335853577,
+ "learning_rate": 0.00017700720615775812,
+ "loss": 1.56,
+ "step": 660
+ },
+ {
+ "epoch": 0.6611859585508424,
+ "grad_norm": 0.6146736741065979,
+ "learning_rate": 0.0001769402902964727,
+ "loss": 1.7014,
+ "step": 661
+ },
+ {
+ "epoch": 0.6621862398799663,
+ "grad_norm": 0.7182234525680542,
+ "learning_rate": 0.00017687328989172288,
+ "loss": 1.5655,
+ "step": 662
+ },
+ {
+ "epoch": 0.66318652120909,
+ "grad_norm": 0.6940692067146301,
+ "learning_rate": 0.00017680620501712996,
+ "loss": 1.6177,
+ "step": 663
+ },
+ {
+ "epoch": 0.6641868025382138,
+ "grad_norm": 0.7672961950302124,
+ "learning_rate": 0.00017673903574640814,
+ "loss": 1.559,
+ "step": 664
+ },
+ {
+ "epoch": 0.6651870838673377,
+ "grad_norm": 0.654500424861908,
+ "learning_rate": 0.00017667178215336423,
+ "loss": 1.5024,
+ "step": 665
+ },
+ {
+ "epoch": 0.6661873651964615,
+ "grad_norm": 0.8137261867523193,
+ "learning_rate": 0.0001766044443118978,
+ "loss": 1.7865,
+ "step": 666
+ },
+ {
+ "epoch": 0.6671876465255854,
+ "grad_norm": 0.806624710559845,
+ "learning_rate": 0.000176537022296001,
+ "loss": 1.4944,
+ "step": 667
+ },
+ {
+ "epoch": 0.6681879278547092,
+ "grad_norm": 0.7952747941017151,
+ "learning_rate": 0.00017646951617975837,
+ "loss": 1.5371,
+ "step": 668
+ },
+ {
+ "epoch": 0.6691882091838329,
+ "grad_norm": 0.6380738615989685,
+ "learning_rate": 0.00017640192603734692,
+ "loss": 1.3117,
+ "step": 669
+ },
+ {
+ "epoch": 0.6701884905129568,
+ "grad_norm": 0.6559002995491028,
+ "learning_rate": 0.00017633425194303606,
+ "loss": 1.3662,
+ "step": 670
+ },
+ {
+ "epoch": 0.6711887718420806,
+ "grad_norm": 0.715826153755188,
+ "learning_rate": 0.00017626649397118734,
+ "loss": 1.7271,
+ "step": 671
+ },
+ {
+ "epoch": 0.6721890531712044,
+ "grad_norm": 0.6719872355461121,
+ "learning_rate": 0.00017619865219625452,
+ "loss": 1.747,
+ "step": 672
+ },
+ {
+ "epoch": 0.6731893345003283,
+ "grad_norm": 0.6901715397834778,
+ "learning_rate": 0.00017613072669278343,
+ "loss": 1.6438,
+ "step": 673
+ },
+ {
+ "epoch": 0.674189615829452,
+ "grad_norm": 0.6601479649543762,
+ "learning_rate": 0.00017606271753541192,
+ "loss": 1.8191,
+ "step": 674
+ },
+ {
+ "epoch": 0.6751898971585758,
+ "grad_norm": 0.8059187531471252,
+ "learning_rate": 0.00017599462479886974,
+ "loss": 1.6946,
+ "step": 675
+ },
+ {
+ "epoch": 0.6761901784876997,
+ "grad_norm": 0.6966856718063354,
+ "learning_rate": 0.00017592644855797854,
+ "loss": 1.5551,
+ "step": 676
+ },
+ {
+ "epoch": 0.6771904598168235,
+ "grad_norm": 0.7306144833564758,
+ "learning_rate": 0.00017585818888765168,
+ "loss": 1.5429,
+ "step": 677
+ },
+ {
+ "epoch": 0.6781907411459474,
+ "grad_norm": 0.572907030582428,
+ "learning_rate": 0.0001757898458628941,
+ "loss": 1.4437,
+ "step": 678
+ },
+ {
+ "epoch": 0.6791910224750711,
+ "grad_norm": 0.6807466149330139,
+ "learning_rate": 0.00017572141955880252,
+ "loss": 1.6307,
+ "step": 679
+ },
+ {
+ "epoch": 0.6801913038041949,
+ "grad_norm": 0.7529204487800598,
+ "learning_rate": 0.00017565291005056504,
+ "loss": 1.631,
+ "step": 680
+ },
+ {
+ "epoch": 0.6811915851333188,
+ "grad_norm": 0.6292940378189087,
+ "learning_rate": 0.00017558431741346122,
+ "loss": 1.7512,
+ "step": 681
+ },
+ {
+ "epoch": 0.6821918664624426,
+ "grad_norm": 0.7981480956077576,
+ "learning_rate": 0.00017551564172286197,
+ "loss": 1.7704,
+ "step": 682
+ },
+ {
+ "epoch": 0.6831921477915663,
+ "grad_norm": 0.7816259860992432,
+ "learning_rate": 0.00017544688305422943,
+ "loss": 1.4954,
+ "step": 683
+ },
+ {
+ "epoch": 0.6841924291206902,
+ "grad_norm": 0.6866456866264343,
+ "learning_rate": 0.00017537804148311695,
+ "loss": 1.7986,
+ "step": 684
+ },
+ {
+ "epoch": 0.685192710449814,
+ "grad_norm": 0.7499064803123474,
+ "learning_rate": 0.00017530911708516902,
+ "loss": 1.6472,
+ "step": 685
+ },
+ {
+ "epoch": 0.6861929917789378,
+ "grad_norm": 0.5923457145690918,
+ "learning_rate": 0.00017524010993612098,
+ "loss": 1.4866,
+ "step": 686
+ },
+ {
+ "epoch": 0.6871932731080617,
+ "grad_norm": 0.6991822719573975,
+ "learning_rate": 0.00017517102011179933,
+ "loss": 1.605,
+ "step": 687
+ },
+ {
+ "epoch": 0.6881935544371854,
+ "grad_norm": 0.7880247235298157,
+ "learning_rate": 0.0001751018476881212,
+ "loss": 1.641,
+ "step": 688
+ },
+ {
+ "epoch": 0.6891938357663093,
+ "grad_norm": 0.7848097085952759,
+ "learning_rate": 0.00017503259274109464,
+ "loss": 1.7505,
+ "step": 689
+ },
+ {
+ "epoch": 0.6901941170954331,
+ "grad_norm": 0.693678081035614,
+ "learning_rate": 0.00017496325534681825,
+ "loss": 1.6565,
+ "step": 690
+ },
+ {
+ "epoch": 0.6911943984245569,
+ "grad_norm": 0.8232877254486084,
+ "learning_rate": 0.00017489383558148136,
+ "loss": 1.7664,
+ "step": 691
+ },
+ {
+ "epoch": 0.6921946797536808,
+ "grad_norm": 0.7834855914115906,
+ "learning_rate": 0.00017482433352136365,
+ "loss": 1.4381,
+ "step": 692
+ },
+ {
+ "epoch": 0.6931949610828045,
+ "grad_norm": 0.6186713576316833,
+ "learning_rate": 0.00017475474924283536,
+ "loss": 1.6482,
+ "step": 693
+ },
+ {
+ "epoch": 0.6941952424119283,
+ "grad_norm": 0.7511133551597595,
+ "learning_rate": 0.00017468508282235704,
+ "loss": 1.6186,
+ "step": 694
+ },
+ {
+ "epoch": 0.6951955237410522,
+ "grad_norm": 0.8017745614051819,
+ "learning_rate": 0.00017461533433647946,
+ "loss": 1.6597,
+ "step": 695
+ },
+ {
+ "epoch": 0.696195805070176,
+ "grad_norm": 0.8190794587135315,
+ "learning_rate": 0.00017454550386184362,
+ "loss": 1.6602,
+ "step": 696
+ },
+ {
+ "epoch": 0.6971960863992998,
+ "grad_norm": 0.7479042410850525,
+ "learning_rate": 0.00017447559147518055,
+ "loss": 1.77,
+ "step": 697
+ },
+ {
+ "epoch": 0.6981963677284236,
+ "grad_norm": 0.7239962816238403,
+ "learning_rate": 0.00017440559725331135,
+ "loss": 1.5838,
+ "step": 698
+ },
+ {
+ "epoch": 0.6991966490575474,
+ "grad_norm": 0.7252762317657471,
+ "learning_rate": 0.000174335521273147,
+ "loss": 1.5462,
+ "step": 699
+ },
+ {
+ "epoch": 0.7001969303866713,
+ "grad_norm": 0.9383960962295532,
+ "learning_rate": 0.00017426536361168834,
+ "loss": 1.5104,
+ "step": 700
+ },
+ {
+ "epoch": 0.7011972117157951,
+ "grad_norm": 0.6944159269332886,
+ "learning_rate": 0.00017419512434602594,
+ "loss": 1.6382,
+ "step": 701
+ },
+ {
+ "epoch": 0.7021974930449189,
+ "grad_norm": 0.6809273362159729,
+ "learning_rate": 0.00017412480355334005,
+ "loss": 1.725,
+ "step": 702
+ },
+ {
+ "epoch": 0.7031977743740427,
+ "grad_norm": 0.7521125674247742,
+ "learning_rate": 0.00017405440131090048,
+ "loss": 1.8499,
+ "step": 703
+ },
+ {
+ "epoch": 0.7041980557031665,
+ "grad_norm": 0.6854100227355957,
+ "learning_rate": 0.00017398391769606658,
+ "loss": 1.6648,
+ "step": 704
+ },
+ {
+ "epoch": 0.7051983370322903,
+ "grad_norm": 0.7382327318191528,
+ "learning_rate": 0.00017391335278628712,
+ "loss": 1.5806,
+ "step": 705
+ },
+ {
+ "epoch": 0.7061986183614142,
+ "grad_norm": 0.7387582063674927,
+ "learning_rate": 0.00017384270665910014,
+ "loss": 1.5563,
+ "step": 706
+ },
+ {
+ "epoch": 0.707198899690538,
+ "grad_norm": 0.7698972821235657,
+ "learning_rate": 0.000173771979392133,
+ "loss": 1.6626,
+ "step": 707
+ },
+ {
+ "epoch": 0.7081991810196617,
+ "grad_norm": 0.7639899849891663,
+ "learning_rate": 0.00017370117106310214,
+ "loss": 1.6725,
+ "step": 708
+ },
+ {
+ "epoch": 0.7091994623487856,
+ "grad_norm": 0.6684393286705017,
+ "learning_rate": 0.0001736302817498131,
+ "loss": 1.64,
+ "step": 709
+ },
+ {
+ "epoch": 0.7101997436779094,
+ "grad_norm": 0.6329504251480103,
+ "learning_rate": 0.00017355931153016044,
+ "loss": 1.4472,
+ "step": 710
+ },
+ {
+ "epoch": 0.7112000250070333,
+ "grad_norm": 0.8133587837219238,
+ "learning_rate": 0.0001734882604821276,
+ "loss": 1.7971,
+ "step": 711
+ },
+ {
+ "epoch": 0.712200306336157,
+ "grad_norm": 0.6524143218994141,
+ "learning_rate": 0.0001734171286837868,
+ "loss": 1.5366,
+ "step": 712
+ },
+ {
+ "epoch": 0.7132005876652808,
+ "grad_norm": 0.6714311242103577,
+ "learning_rate": 0.00017334591621329906,
+ "loss": 1.841,
+ "step": 713
+ },
+ {
+ "epoch": 0.7142008689944047,
+ "grad_norm": 0.6690782904624939,
+ "learning_rate": 0.00017327462314891402,
+ "loss": 1.623,
+ "step": 714
+ },
+ {
+ "epoch": 0.7152011503235285,
+ "grad_norm": 0.650442361831665,
+ "learning_rate": 0.00017320324956896977,
+ "loss": 1.6124,
+ "step": 715
+ },
+ {
+ "epoch": 0.7162014316526523,
+ "grad_norm": 0.7075713276863098,
+ "learning_rate": 0.00017313179555189306,
+ "loss": 1.5154,
+ "step": 716
+ },
+ {
+ "epoch": 0.7172017129817762,
+ "grad_norm": 0.729060173034668,
+ "learning_rate": 0.00017306026117619889,
+ "loss": 1.7072,
+ "step": 717
+ },
+ {
+ "epoch": 0.7182019943108999,
+ "grad_norm": 0.8547433614730835,
+ "learning_rate": 0.0001729886465204906,
+ "loss": 1.6237,
+ "step": 718
+ },
+ {
+ "epoch": 0.7192022756400237,
+ "grad_norm": 0.6729336380958557,
+ "learning_rate": 0.0001729169516634598,
+ "loss": 1.7769,
+ "step": 719
+ },
+ {
+ "epoch": 0.7202025569691476,
+ "grad_norm": 0.7437167167663574,
+ "learning_rate": 0.0001728451766838861,
+ "loss": 1.5056,
+ "step": 720
+ },
+ {
+ "epoch": 0.7212028382982714,
+ "grad_norm": 0.6573147177696228,
+ "learning_rate": 0.00017277332166063726,
+ "loss": 1.7694,
+ "step": 721
+ },
+ {
+ "epoch": 0.7222031196273953,
+ "grad_norm": 0.6767126321792603,
+ "learning_rate": 0.00017270138667266894,
+ "loss": 1.6014,
+ "step": 722
+ },
+ {
+ "epoch": 0.723203400956519,
+ "grad_norm": 0.7488179206848145,
+ "learning_rate": 0.00017262937179902472,
+ "loss": 1.573,
+ "step": 723
+ },
+ {
+ "epoch": 0.7242036822856428,
+ "grad_norm": 0.6491002440452576,
+ "learning_rate": 0.00017255727711883588,
+ "loss": 1.6705,
+ "step": 724
+ },
+ {
+ "epoch": 0.7252039636147667,
+ "grad_norm": 0.764090359210968,
+ "learning_rate": 0.00017248510271132144,
+ "loss": 1.6761,
+ "step": 725
+ },
+ {
+ "epoch": 0.7262042449438905,
+ "grad_norm": 0.7116997838020325,
+ "learning_rate": 0.00017241284865578802,
+ "loss": 1.7435,
+ "step": 726
+ },
+ {
+ "epoch": 0.7272045262730142,
+ "grad_norm": 0.6367645859718323,
+ "learning_rate": 0.00017234051503162978,
+ "loss": 1.7061,
+ "step": 727
+ },
+ {
+ "epoch": 0.7282048076021381,
+ "grad_norm": 0.7232155203819275,
+ "learning_rate": 0.0001722681019183283,
+ "loss": 1.8142,
+ "step": 728
+ },
+ {
+ "epoch": 0.7292050889312619,
+ "grad_norm": 0.7533649802207947,
+ "learning_rate": 0.00017219560939545246,
+ "loss": 1.8202,
+ "step": 729
+ },
+ {
+ "epoch": 0.7302053702603858,
+ "grad_norm": 0.6923018097877502,
+ "learning_rate": 0.00017212303754265843,
+ "loss": 1.4925,
+ "step": 730
+ },
+ {
+ "epoch": 0.7312056515895096,
+ "grad_norm": 0.7326932549476624,
+ "learning_rate": 0.0001720503864396896,
+ "loss": 1.5192,
+ "step": 731
+ },
+ {
+ "epoch": 0.7322059329186333,
+ "grad_norm": 0.7220762968063354,
+ "learning_rate": 0.00017197765616637636,
+ "loss": 1.7601,
+ "step": 732
+ },
+ {
+ "epoch": 0.7332062142477572,
+ "grad_norm": 0.605725884437561,
+ "learning_rate": 0.0001719048468026361,
+ "loss": 1.6309,
+ "step": 733
+ },
+ {
+ "epoch": 0.734206495576881,
+ "grad_norm": 0.6728388667106628,
+ "learning_rate": 0.00017183195842847322,
+ "loss": 1.5993,
+ "step": 734
+ },
+ {
+ "epoch": 0.7352067769060048,
+ "grad_norm": 0.7035244703292847,
+ "learning_rate": 0.0001717589911239788,
+ "loss": 1.6031,
+ "step": 735
+ },
+ {
+ "epoch": 0.7362070582351287,
+ "grad_norm": 0.7473010420799255,
+ "learning_rate": 0.00017168594496933074,
+ "loss": 1.5833,
+ "step": 736
+ },
+ {
+ "epoch": 0.7372073395642524,
+ "grad_norm": 0.6310701370239258,
+ "learning_rate": 0.00017161282004479351,
+ "loss": 1.4328,
+ "step": 737
+ },
+ {
+ "epoch": 0.7382076208933762,
+ "grad_norm": 0.6805673837661743,
+ "learning_rate": 0.0001715396164307182,
+ "loss": 1.5429,
+ "step": 738
+ },
+ {
+ "epoch": 0.7392079022225001,
+ "grad_norm": 0.747222900390625,
+ "learning_rate": 0.0001714663342075424,
+ "loss": 1.7696,
+ "step": 739
+ },
+ {
+ "epoch": 0.7402081835516239,
+ "grad_norm": 0.8214403390884399,
+ "learning_rate": 0.00017139297345578994,
+ "loss": 1.5997,
+ "step": 740
+ },
+ {
+ "epoch": 0.7412084648807478,
+ "grad_norm": 0.6722521781921387,
+ "learning_rate": 0.00017131953425607104,
+ "loss": 1.5287,
+ "step": 741
+ },
+ {
+ "epoch": 0.7422087462098715,
+ "grad_norm": 0.6937971115112305,
+ "learning_rate": 0.00017124601668908212,
+ "loss": 1.7263,
+ "step": 742
+ },
+ {
+ "epoch": 0.7432090275389953,
+ "grad_norm": 0.7590844631195068,
+ "learning_rate": 0.00017117242083560568,
+ "loss": 1.7263,
+ "step": 743
+ },
+ {
+ "epoch": 0.7442093088681192,
+ "grad_norm": 0.7913306355476379,
+ "learning_rate": 0.00017109874677651024,
+ "loss": 1.7646,
+ "step": 744
+ },
+ {
+ "epoch": 0.745209590197243,
+ "grad_norm": 0.7123669385910034,
+ "learning_rate": 0.0001710249945927503,
+ "loss": 1.6768,
+ "step": 745
+ },
+ {
+ "epoch": 0.7462098715263668,
+ "grad_norm": 0.8426288366317749,
+ "learning_rate": 0.00017095116436536612,
+ "loss": 1.8496,
+ "step": 746
+ },
+ {
+ "epoch": 0.7472101528554906,
+ "grad_norm": 0.6152015328407288,
+ "learning_rate": 0.00017087725617548385,
+ "loss": 1.4527,
+ "step": 747
+ },
+ {
+ "epoch": 0.7482104341846144,
+ "grad_norm": 0.8348223567008972,
+ "learning_rate": 0.00017080327010431513,
+ "loss": 1.4847,
+ "step": 748
+ },
+ {
+ "epoch": 0.7492107155137382,
+ "grad_norm": 0.7883800268173218,
+ "learning_rate": 0.00017072920623315734,
+ "loss": 1.5941,
+ "step": 749
+ },
+ {
+ "epoch": 0.7502109968428621,
+ "grad_norm": 0.6957768201828003,
+ "learning_rate": 0.00017065506464339326,
+ "loss": 1.7543,
+ "step": 750
+ },
+ {
+ "epoch": 0.7512112781719859,
+ "grad_norm": 0.5898700952529907,
+ "learning_rate": 0.00017058084541649106,
+ "loss": 1.7859,
+ "step": 751
+ },
+ {
+ "epoch": 0.7522115595011097,
+ "grad_norm": 0.6882239580154419,
+ "learning_rate": 0.00017050654863400429,
+ "loss": 1.3233,
+ "step": 752
+ },
+ {
+ "epoch": 0.7532118408302335,
+ "grad_norm": 0.7327316999435425,
+ "learning_rate": 0.00017043217437757164,
+ "loss": 1.5067,
+ "step": 753
+ },
+ {
+ "epoch": 0.7542121221593573,
+ "grad_norm": 0.9257964491844177,
+ "learning_rate": 0.00017035772272891702,
+ "loss": 1.503,
+ "step": 754
+ },
+ {
+ "epoch": 0.7552124034884812,
+ "grad_norm": 0.7924116253852844,
+ "learning_rate": 0.00017028319376984928,
+ "loss": 1.8975,
+ "step": 755
+ },
+ {
+ "epoch": 0.756212684817605,
+ "grad_norm": 0.6651099920272827,
+ "learning_rate": 0.00017020858758226229,
+ "loss": 1.649,
+ "step": 756
+ },
+ {
+ "epoch": 0.7572129661467287,
+ "grad_norm": 0.7257362604141235,
+ "learning_rate": 0.0001701339042481347,
+ "loss": 1.6919,
+ "step": 757
+ },
+ {
+ "epoch": 0.7582132474758526,
+ "grad_norm": 0.8733739852905273,
+ "learning_rate": 0.00017005914384953007,
+ "loss": 1.5929,
+ "step": 758
+ },
+ {
+ "epoch": 0.7592135288049764,
+ "grad_norm": 0.6347383856773376,
+ "learning_rate": 0.00016998430646859654,
+ "loss": 1.3341,
+ "step": 759
+ },
+ {
+ "epoch": 0.7602138101341002,
+ "grad_norm": 0.6915012001991272,
+ "learning_rate": 0.00016990939218756683,
+ "loss": 1.4971,
+ "step": 760
+ },
+ {
+ "epoch": 0.761214091463224,
+ "grad_norm": 0.7862069606781006,
+ "learning_rate": 0.0001698344010887582,
+ "loss": 1.7468,
+ "step": 761
+ },
+ {
+ "epoch": 0.7622143727923478,
+ "grad_norm": 0.7318029403686523,
+ "learning_rate": 0.0001697593332545723,
+ "loss": 1.8143,
+ "step": 762
+ },
+ {
+ "epoch": 0.7632146541214717,
+ "grad_norm": 0.6758155226707458,
+ "learning_rate": 0.0001696841887674951,
+ "loss": 1.6652,
+ "step": 763
+ },
+ {
+ "epoch": 0.7642149354505955,
+ "grad_norm": 0.6853237748146057,
+ "learning_rate": 0.00016960896771009684,
+ "loss": 1.5176,
+ "step": 764
+ },
+ {
+ "epoch": 0.7652152167797193,
+ "grad_norm": 0.9686934351921082,
+ "learning_rate": 0.00016953367016503182,
+ "loss": 1.5366,
+ "step": 765
+ },
+ {
+ "epoch": 0.7662154981088432,
+ "grad_norm": 0.7232028841972351,
+ "learning_rate": 0.00016945829621503838,
+ "loss": 1.6932,
+ "step": 766
+ },
+ {
+ "epoch": 0.7672157794379669,
+ "grad_norm": 0.6606596112251282,
+ "learning_rate": 0.00016938284594293897,
+ "loss": 1.7051,
+ "step": 767
+ },
+ {
+ "epoch": 0.7682160607670907,
+ "grad_norm": 0.6337714195251465,
+ "learning_rate": 0.00016930731943163972,
+ "loss": 1.6505,
+ "step": 768
+ },
+ {
+ "epoch": 0.7692163420962146,
+ "grad_norm": 0.6292264461517334,
+ "learning_rate": 0.00016923171676413063,
+ "loss": 1.7207,
+ "step": 769
+ },
+ {
+ "epoch": 0.7702166234253384,
+ "grad_norm": 0.7183407545089722,
+ "learning_rate": 0.00016915603802348535,
+ "loss": 1.7025,
+ "step": 770
+ },
+ {
+ "epoch": 0.7712169047544621,
+ "grad_norm": 0.805107593536377,
+ "learning_rate": 0.00016908028329286112,
+ "loss": 1.592,
+ "step": 771
+ },
+ {
+ "epoch": 0.772217186083586,
+ "grad_norm": 0.725777804851532,
+ "learning_rate": 0.0001690044526554987,
+ "loss": 1.6714,
+ "step": 772
+ },
+ {
+ "epoch": 0.7732174674127098,
+ "grad_norm": 0.6801775097846985,
+ "learning_rate": 0.00016892854619472223,
+ "loss": 1.5047,
+ "step": 773
+ },
+ {
+ "epoch": 0.7742177487418337,
+ "grad_norm": 0.7701449990272522,
+ "learning_rate": 0.00016885256399393924,
+ "loss": 1.5506,
+ "step": 774
+ },
+ {
+ "epoch": 0.7752180300709575,
+ "grad_norm": 0.6954746842384338,
+ "learning_rate": 0.00016877650613664034,
+ "loss": 1.4859,
+ "step": 775
+ },
+ {
+ "epoch": 0.7762183114000812,
+ "grad_norm": 0.7431885004043579,
+ "learning_rate": 0.00016870037270639942,
+ "loss": 1.6087,
+ "step": 776
+ },
+ {
+ "epoch": 0.7772185927292051,
+ "grad_norm": 0.687329113483429,
+ "learning_rate": 0.0001686241637868734,
+ "loss": 1.7038,
+ "step": 777
+ },
+ {
+ "epoch": 0.7782188740583289,
+ "grad_norm": 0.6656787395477295,
+ "learning_rate": 0.00016854787946180198,
+ "loss": 1.5691,
+ "step": 778
+ },
+ {
+ "epoch": 0.7792191553874527,
+ "grad_norm": 0.7476064562797546,
+ "learning_rate": 0.00016847151981500789,
+ "loss": 1.4972,
+ "step": 779
+ },
+ {
+ "epoch": 0.7802194367165766,
+ "grad_norm": 0.7320332527160645,
+ "learning_rate": 0.00016839508493039657,
+ "loss": 1.7326,
+ "step": 780
+ },
+ {
+ "epoch": 0.7812197180457003,
+ "grad_norm": 0.6432293057441711,
+ "learning_rate": 0.00016831857489195618,
+ "loss": 1.542,
+ "step": 781
+ },
+ {
+ "epoch": 0.7822199993748241,
+ "grad_norm": 0.6751729846000671,
+ "learning_rate": 0.00016824198978375736,
+ "loss": 1.6864,
+ "step": 782
+ },
+ {
+ "epoch": 0.783220280703948,
+ "grad_norm": 0.770193338394165,
+ "learning_rate": 0.00016816532968995328,
+ "loss": 1.5318,
+ "step": 783
+ },
+ {
+ "epoch": 0.7842205620330718,
+ "grad_norm": 0.6820619106292725,
+ "learning_rate": 0.0001680885946947796,
+ "loss": 1.6004,
+ "step": 784
+ },
+ {
+ "epoch": 0.7852208433621957,
+ "grad_norm": 0.9120951294898987,
+ "learning_rate": 0.00016801178488255413,
+ "loss": 1.6506,
+ "step": 785
+ },
+ {
+ "epoch": 0.7862211246913194,
+ "grad_norm": 0.7819542288780212,
+ "learning_rate": 0.00016793490033767698,
+ "loss": 1.5292,
+ "step": 786
+ },
+ {
+ "epoch": 0.7872214060204432,
+ "grad_norm": 0.6647278666496277,
+ "learning_rate": 0.00016785794114463037,
+ "loss": 1.5941,
+ "step": 787
+ },
+ {
+ "epoch": 0.7882216873495671,
+ "grad_norm": 0.6874713897705078,
+ "learning_rate": 0.00016778090738797853,
+ "loss": 1.5543,
+ "step": 788
+ },
+ {
+ "epoch": 0.7892219686786909,
+ "grad_norm": 0.7759424448013306,
+ "learning_rate": 0.00016770379915236766,
+ "loss": 1.6788,
+ "step": 789
+ },
+ {
+ "epoch": 0.7902222500078147,
+ "grad_norm": 0.724583625793457,
+ "learning_rate": 0.00016762661652252567,
+ "loss": 1.5998,
+ "step": 790
+ },
+ {
+ "epoch": 0.7912225313369385,
+ "grad_norm": 0.7921720743179321,
+ "learning_rate": 0.00016754935958326244,
+ "loss": 1.5956,
+ "step": 791
+ },
+ {
+ "epoch": 0.7922228126660623,
+ "grad_norm": 0.6484968662261963,
+ "learning_rate": 0.00016747202841946928,
+ "loss": 1.5708,
+ "step": 792
+ },
+ {
+ "epoch": 0.7932230939951862,
+ "grad_norm": 0.6372153759002686,
+ "learning_rate": 0.00016739462311611919,
+ "loss": 1.5213,
+ "step": 793
+ },
+ {
+ "epoch": 0.79422337532431,
+ "grad_norm": 0.7025095224380493,
+ "learning_rate": 0.00016731714375826657,
+ "loss": 1.4701,
+ "step": 794
+ },
+ {
+ "epoch": 0.7952236566534338,
+ "grad_norm": 0.681094765663147,
+ "learning_rate": 0.00016723959043104728,
+ "loss": 1.5101,
+ "step": 795
+ },
+ {
+ "epoch": 0.7962239379825576,
+ "grad_norm": 0.7129995822906494,
+ "learning_rate": 0.00016716196321967832,
+ "loss": 1.6038,
+ "step": 796
+ },
+ {
+ "epoch": 0.7972242193116814,
+ "grad_norm": 0.7403759360313416,
+ "learning_rate": 0.00016708426220945802,
+ "loss": 1.5906,
+ "step": 797
+ },
+ {
+ "epoch": 0.7982245006408052,
+ "grad_norm": 0.6562372446060181,
+ "learning_rate": 0.00016700648748576574,
+ "loss": 1.6469,
+ "step": 798
+ },
+ {
+ "epoch": 0.7992247819699291,
+ "grad_norm": 0.839885413646698,
+ "learning_rate": 0.0001669286391340618,
+ "loss": 1.5385,
+ "step": 799
+ },
+ {
+ "epoch": 0.8002250632990529,
+ "grad_norm": 0.8687535524368286,
+ "learning_rate": 0.00016685071723988748,
+ "loss": 1.6759,
+ "step": 800
+ },
+ {
+ "epoch": 0.8012253446281766,
+ "grad_norm": 0.6825409531593323,
+ "learning_rate": 0.00016677272188886483,
+ "loss": 1.841,
+ "step": 801
+ },
+ {
+ "epoch": 0.8022256259573005,
+ "grad_norm": 0.6831037402153015,
+ "learning_rate": 0.00016669465316669667,
+ "loss": 1.5476,
+ "step": 802
+ },
+ {
+ "epoch": 0.8032259072864243,
+ "grad_norm": 0.6906002759933472,
+ "learning_rate": 0.00016661651115916642,
+ "loss": 1.6866,
+ "step": 803
+ },
+ {
+ "epoch": 0.8042261886155482,
+ "grad_norm": 0.7675560116767883,
+ "learning_rate": 0.00016653829595213794,
+ "loss": 1.5663,
+ "step": 804
+ },
+ {
+ "epoch": 0.805226469944672,
+ "grad_norm": 0.6594063639640808,
+ "learning_rate": 0.00016646000763155568,
+ "loss": 1.5247,
+ "step": 805
+ },
+ {
+ "epoch": 0.8062267512737957,
+ "grad_norm": 0.7470384836196899,
+ "learning_rate": 0.00016638164628344425,
+ "loss": 1.6468,
+ "step": 806
+ },
+ {
+ "epoch": 0.8072270326029196,
+ "grad_norm": 0.6874479651451111,
+ "learning_rate": 0.00016630321199390867,
+ "loss": 1.5948,
+ "step": 807
+ },
+ {
+ "epoch": 0.8082273139320434,
+ "grad_norm": 0.7301204204559326,
+ "learning_rate": 0.00016622470484913406,
+ "loss": 1.3922,
+ "step": 808
+ },
+ {
+ "epoch": 0.8092275952611672,
+ "grad_norm": 0.6781039834022522,
+ "learning_rate": 0.00016614612493538551,
+ "loss": 1.6054,
+ "step": 809
+ },
+ {
+ "epoch": 0.810227876590291,
+ "grad_norm": 0.6913226246833801,
+ "learning_rate": 0.00016606747233900815,
+ "loss": 1.5754,
+ "step": 810
+ },
+ {
+ "epoch": 0.8112281579194148,
+ "grad_norm": 0.667425811290741,
+ "learning_rate": 0.00016598874714642697,
+ "loss": 1.8492,
+ "step": 811
+ },
+ {
+ "epoch": 0.8122284392485386,
+ "grad_norm": 0.7662241458892822,
+ "learning_rate": 0.00016590994944414678,
+ "loss": 1.8034,
+ "step": 812
+ },
+ {
+ "epoch": 0.8132287205776625,
+ "grad_norm": 0.7574827075004578,
+ "learning_rate": 0.00016583107931875192,
+ "loss": 1.7435,
+ "step": 813
+ },
+ {
+ "epoch": 0.8142290019067863,
+ "grad_norm": 0.9005519151687622,
+ "learning_rate": 0.0001657521368569064,
+ "loss": 1.6769,
+ "step": 814
+ },
+ {
+ "epoch": 0.8152292832359102,
+ "grad_norm": 0.6895585656166077,
+ "learning_rate": 0.0001656731221453537,
+ "loss": 1.7562,
+ "step": 815
+ },
+ {
+ "epoch": 0.8162295645650339,
+ "grad_norm": 0.7573346495628357,
+ "learning_rate": 0.00016559403527091675,
+ "loss": 1.4748,
+ "step": 816
+ },
+ {
+ "epoch": 0.8172298458941577,
+ "grad_norm": 0.7698647975921631,
+ "learning_rate": 0.0001655148763204977,
+ "loss": 1.6174,
+ "step": 817
+ },
+ {
+ "epoch": 0.8182301272232816,
+ "grad_norm": 0.7975410223007202,
+ "learning_rate": 0.00016543564538107797,
+ "loss": 1.7924,
+ "step": 818
+ },
+ {
+ "epoch": 0.8192304085524054,
+ "grad_norm": 0.9687625169754028,
+ "learning_rate": 0.00016535634253971794,
+ "loss": 1.7725,
+ "step": 819
+ },
+ {
+ "epoch": 0.8202306898815291,
+ "grad_norm": 0.6777274012565613,
+ "learning_rate": 0.00016527696788355714,
+ "loss": 1.5018,
+ "step": 820
+ },
+ {
+ "epoch": 0.821230971210653,
+ "grad_norm": 0.6990464329719543,
+ "learning_rate": 0.00016519752149981397,
+ "loss": 1.5804,
+ "step": 821
+ },
+ {
+ "epoch": 0.8222312525397768,
+ "grad_norm": 0.8445940613746643,
+ "learning_rate": 0.0001651180034757856,
+ "loss": 1.8591,
+ "step": 822
+ },
+ {
+ "epoch": 0.8232315338689006,
+ "grad_norm": 0.8462644815444946,
+ "learning_rate": 0.00016503841389884798,
+ "loss": 1.7582,
+ "step": 823
+ },
+ {
+ "epoch": 0.8242318151980245,
+ "grad_norm": 0.7679311037063599,
+ "learning_rate": 0.00016495875285645566,
+ "loss": 1.5971,
+ "step": 824
+ },
+ {
+ "epoch": 0.8252320965271482,
+ "grad_norm": 0.7734447717666626,
+ "learning_rate": 0.00016487902043614173,
+ "loss": 1.714,
+ "step": 825
+ },
+ {
+ "epoch": 0.8262323778562721,
+ "grad_norm": 0.7890239953994751,
+ "learning_rate": 0.0001647992167255177,
+ "loss": 1.6876,
+ "step": 826
+ },
+ {
+ "epoch": 0.8272326591853959,
+ "grad_norm": 0.8530203104019165,
+ "learning_rate": 0.0001647193418122734,
+ "loss": 1.9096,
+ "step": 827
+ },
+ {
+ "epoch": 0.8282329405145197,
+ "grad_norm": 0.7828260064125061,
+ "learning_rate": 0.00016463939578417692,
+ "loss": 1.5518,
+ "step": 828
+ },
+ {
+ "epoch": 0.8292332218436436,
+ "grad_norm": 0.7015512585639954,
+ "learning_rate": 0.0001645593787290745,
+ "loss": 1.49,
+ "step": 829
+ },
+ {
+ "epoch": 0.8302335031727673,
+ "grad_norm": 0.694771409034729,
+ "learning_rate": 0.0001644792907348904,
+ "loss": 1.5506,
+ "step": 830
+ },
+ {
+ "epoch": 0.8312337845018911,
+ "grad_norm": 0.8167857527732849,
+ "learning_rate": 0.00016439913188962685,
+ "loss": 1.7798,
+ "step": 831
+ },
+ {
+ "epoch": 0.832234065831015,
+ "grad_norm": 0.6682108044624329,
+ "learning_rate": 0.0001643189022813639,
+ "loss": 1.6107,
+ "step": 832
+ },
+ {
+ "epoch": 0.8332343471601388,
+ "grad_norm": 0.8347259163856506,
+ "learning_rate": 0.0001642386019982594,
+ "loss": 1.7672,
+ "step": 833
+ },
+ {
+ "epoch": 0.8342346284892626,
+ "grad_norm": 0.6620945334434509,
+ "learning_rate": 0.00016415823112854883,
+ "loss": 1.6975,
+ "step": 834
+ },
+ {
+ "epoch": 0.8352349098183864,
+ "grad_norm": 0.7286327481269836,
+ "learning_rate": 0.00016407778976054526,
+ "loss": 1.5956,
+ "step": 835
+ },
+ {
+ "epoch": 0.8362351911475102,
+ "grad_norm": 0.6344440579414368,
+ "learning_rate": 0.0001639972779826392,
+ "loss": 1.6455,
+ "step": 836
+ },
+ {
+ "epoch": 0.8372354724766341,
+ "grad_norm": 0.6607793569564819,
+ "learning_rate": 0.0001639166958832985,
+ "loss": 1.6739,
+ "step": 837
+ },
+ {
+ "epoch": 0.8382357538057579,
+ "grad_norm": 0.6973574161529541,
+ "learning_rate": 0.00016383604355106837,
+ "loss": 1.8042,
+ "step": 838
+ },
+ {
+ "epoch": 0.8392360351348817,
+ "grad_norm": 0.7744210958480835,
+ "learning_rate": 0.00016375532107457108,
+ "loss": 1.528,
+ "step": 839
+ },
+ {
+ "epoch": 0.8402363164640055,
+ "grad_norm": 0.6944973468780518,
+ "learning_rate": 0.00016367452854250603,
+ "loss": 1.5498,
+ "step": 840
+ },
+ {
+ "epoch": 0.8412365977931293,
+ "grad_norm": 0.6730696558952332,
+ "learning_rate": 0.00016359366604364972,
+ "loss": 1.5849,
+ "step": 841
+ },
+ {
+ "epoch": 0.8422368791222531,
+ "grad_norm": 0.7051465511322021,
+ "learning_rate": 0.00016351273366685526,
+ "loss": 1.5972,
+ "step": 842
+ },
+ {
+ "epoch": 0.843237160451377,
+ "grad_norm": 0.7309426069259644,
+ "learning_rate": 0.00016343173150105278,
+ "loss": 1.4612,
+ "step": 843
+ },
+ {
+ "epoch": 0.8442374417805008,
+ "grad_norm": 0.7830431461334229,
+ "learning_rate": 0.00016335065963524897,
+ "loss": 1.7208,
+ "step": 844
+ },
+ {
+ "epoch": 0.8452377231096245,
+ "grad_norm": 0.8609834909439087,
+ "learning_rate": 0.0001632695181585272,
+ "loss": 1.8229,
+ "step": 845
+ },
+ {
+ "epoch": 0.8462380044387484,
+ "grad_norm": 0.7489060759544373,
+ "learning_rate": 0.00016318830716004722,
+ "loss": 1.6955,
+ "step": 846
+ },
+ {
+ "epoch": 0.8472382857678722,
+ "grad_norm": 0.636900782585144,
+ "learning_rate": 0.00016310702672904528,
+ "loss": 1.6664,
+ "step": 847
+ },
+ {
+ "epoch": 0.8482385670969961,
+ "grad_norm": 0.6423529386520386,
+ "learning_rate": 0.00016302567695483382,
+ "loss": 1.5356,
+ "step": 848
+ },
+ {
+ "epoch": 0.8492388484261199,
+ "grad_norm": 0.7380033731460571,
+ "learning_rate": 0.0001629442579268016,
+ "loss": 1.4482,
+ "step": 849
+ },
+ {
+ "epoch": 0.8502391297552436,
+ "grad_norm": 0.8258544206619263,
+ "learning_rate": 0.00016286276973441333,
+ "loss": 1.7058,
+ "step": 850
+ },
+ {
+ "epoch": 0.8512394110843675,
+ "grad_norm": 0.6473391056060791,
+ "learning_rate": 0.00016278121246720987,
+ "loss": 1.5374,
+ "step": 851
+ },
+ {
+ "epoch": 0.8522396924134913,
+ "grad_norm": 0.7097072005271912,
+ "learning_rate": 0.00016269958621480788,
+ "loss": 1.6786,
+ "step": 852
+ },
+ {
+ "epoch": 0.8532399737426151,
+ "grad_norm": 0.724993884563446,
+ "learning_rate": 0.0001626178910668998,
+ "loss": 1.6022,
+ "step": 853
+ },
+ {
+ "epoch": 0.854240255071739,
+ "grad_norm": 0.6800474524497986,
+ "learning_rate": 0.00016253612711325386,
+ "loss": 1.6382,
+ "step": 854
+ },
+ {
+ "epoch": 0.8552405364008627,
+ "grad_norm": 0.6339759826660156,
+ "learning_rate": 0.0001624542944437139,
+ "loss": 1.5641,
+ "step": 855
+ },
+ {
+ "epoch": 0.8562408177299866,
+ "grad_norm": 0.6792349219322205,
+ "learning_rate": 0.00016237239314819917,
+ "loss": 1.3713,
+ "step": 856
+ },
+ {
+ "epoch": 0.8572410990591104,
+ "grad_norm": 0.6544696688652039,
+ "learning_rate": 0.0001622904233167044,
+ "loss": 1.5639,
+ "step": 857
+ },
+ {
+ "epoch": 0.8582413803882342,
+ "grad_norm": 0.7736073732376099,
+ "learning_rate": 0.0001622083850392996,
+ "loss": 1.5454,
+ "step": 858
+ },
+ {
+ "epoch": 0.859241661717358,
+ "grad_norm": 0.8642422556877136,
+ "learning_rate": 0.00016212627840613003,
+ "loss": 1.6852,
+ "step": 859
+ },
+ {
+ "epoch": 0.8602419430464818,
+ "grad_norm": 0.6520773768424988,
+ "learning_rate": 0.000162044103507416,
+ "loss": 1.5335,
+ "step": 860
+ },
+ {
+ "epoch": 0.8612422243756056,
+ "grad_norm": 0.7647336721420288,
+ "learning_rate": 0.00016196186043345288,
+ "loss": 1.5578,
+ "step": 861
+ },
+ {
+ "epoch": 0.8622425057047295,
+ "grad_norm": 0.9621163010597229,
+ "learning_rate": 0.00016187954927461093,
+ "loss": 1.6976,
+ "step": 862
+ },
+ {
+ "epoch": 0.8632427870338533,
+ "grad_norm": 0.6847056746482849,
+ "learning_rate": 0.00016179717012133521,
+ "loss": 1.7118,
+ "step": 863
+ },
+ {
+ "epoch": 0.864243068362977,
+ "grad_norm": 0.7482467889785767,
+ "learning_rate": 0.00016171472306414554,
+ "loss": 1.6601,
+ "step": 864
+ },
+ {
+ "epoch": 0.8652433496921009,
+ "grad_norm": 0.7760444283485413,
+ "learning_rate": 0.00016163220819363628,
+ "loss": 1.5587,
+ "step": 865
+ },
+ {
+ "epoch": 0.8662436310212247,
+ "grad_norm": 0.8380980491638184,
+ "learning_rate": 0.00016154962560047643,
+ "loss": 1.7171,
+ "step": 866
+ },
+ {
+ "epoch": 0.8672439123503486,
+ "grad_norm": 0.6927618384361267,
+ "learning_rate": 0.00016146697537540924,
+ "loss": 1.7244,
+ "step": 867
+ },
+ {
+ "epoch": 0.8682441936794724,
+ "grad_norm": 0.7855746746063232,
+ "learning_rate": 0.0001613842576092524,
+ "loss": 1.5848,
+ "step": 868
+ },
+ {
+ "epoch": 0.8692444750085961,
+ "grad_norm": 0.6743006110191345,
+ "learning_rate": 0.00016130147239289778,
+ "loss": 1.6969,
+ "step": 869
+ },
+ {
+ "epoch": 0.87024475633772,
+ "grad_norm": 0.7060980200767517,
+ "learning_rate": 0.00016121861981731135,
+ "loss": 1.5632,
+ "step": 870
+ },
+ {
+ "epoch": 0.8712450376668438,
+ "grad_norm": 0.7673144340515137,
+ "learning_rate": 0.00016113569997353312,
+ "loss": 1.5687,
+ "step": 871
+ },
+ {
+ "epoch": 0.8722453189959676,
+ "grad_norm": 0.8105847239494324,
+ "learning_rate": 0.000161052712952677,
+ "loss": 1.6074,
+ "step": 872
+ },
+ {
+ "epoch": 0.8732456003250915,
+ "grad_norm": 0.6536850333213806,
+ "learning_rate": 0.0001609696588459307,
+ "loss": 1.5842,
+ "step": 873
+ },
+ {
+ "epoch": 0.8742458816542152,
+ "grad_norm": 0.6653574705123901,
+ "learning_rate": 0.00016088653774455568,
+ "loss": 1.4652,
+ "step": 874
+ },
+ {
+ "epoch": 0.875246162983339,
+ "grad_norm": 0.7202721238136292,
+ "learning_rate": 0.00016080334973988695,
+ "loss": 1.5212,
+ "step": 875
+ },
+ {
+ "epoch": 0.8762464443124629,
+ "grad_norm": 0.8218807578086853,
+ "learning_rate": 0.00016072009492333318,
+ "loss": 1.803,
+ "step": 876
+ },
+ {
+ "epoch": 0.8772467256415867,
+ "grad_norm": 0.6170400381088257,
+ "learning_rate": 0.0001606367733863763,
+ "loss": 1.5313,
+ "step": 877
+ },
+ {
+ "epoch": 0.8782470069707106,
+ "grad_norm": 0.6750448346138,
+ "learning_rate": 0.00016055338522057158,
+ "loss": 1.6183,
+ "step": 878
+ },
+ {
+ "epoch": 0.8792472882998343,
+ "grad_norm": 0.6602128148078918,
+ "learning_rate": 0.00016046993051754756,
+ "loss": 1.6669,
+ "step": 879
+ },
+ {
+ "epoch": 0.8802475696289581,
+ "grad_norm": 0.7064031958580017,
+ "learning_rate": 0.00016038640936900586,
+ "loss": 1.7458,
+ "step": 880
+ },
+ {
+ "epoch": 0.881247850958082,
+ "grad_norm": 0.5916783809661865,
+ "learning_rate": 0.00016030282186672116,
+ "loss": 1.4966,
+ "step": 881
+ },
+ {
+ "epoch": 0.8822481322872058,
+ "grad_norm": 0.7189202904701233,
+ "learning_rate": 0.00016021916810254097,
+ "loss": 1.5812,
+ "step": 882
+ },
+ {
+ "epoch": 0.8832484136163296,
+ "grad_norm": 0.7760966420173645,
+ "learning_rate": 0.00016013544816838565,
+ "loss": 1.6709,
+ "step": 883
+ },
+ {
+ "epoch": 0.8842486949454534,
+ "grad_norm": 0.6894650459289551,
+ "learning_rate": 0.00016005166215624827,
+ "loss": 1.6255,
+ "step": 884
+ },
+ {
+ "epoch": 0.8852489762745772,
+ "grad_norm": 0.6777058839797974,
+ "learning_rate": 0.0001599678101581945,
+ "loss": 1.7479,
+ "step": 885
+ },
+ {
+ "epoch": 0.886249257603701,
+ "grad_norm": 0.7056024670600891,
+ "learning_rate": 0.00015988389226636253,
+ "loss": 1.7896,
+ "step": 886
+ },
+ {
+ "epoch": 0.8872495389328249,
+ "grad_norm": 0.6465604305267334,
+ "learning_rate": 0.00015979990857296295,
+ "loss": 1.7363,
+ "step": 887
+ },
+ {
+ "epoch": 0.8882498202619487,
+ "grad_norm": 0.6703017950057983,
+ "learning_rate": 0.00015971585917027862,
+ "loss": 1.6617,
+ "step": 888
+ },
+ {
+ "epoch": 0.8892501015910725,
+ "grad_norm": 0.7116142511367798,
+ "learning_rate": 0.00015963174415066468,
+ "loss": 1.8232,
+ "step": 889
+ },
+ {
+ "epoch": 0.8902503829201963,
+ "grad_norm": 0.7552229762077332,
+ "learning_rate": 0.0001595475636065483,
+ "loss": 1.7847,
+ "step": 890
+ },
+ {
+ "epoch": 0.8912506642493201,
+ "grad_norm": 0.70728999376297,
+ "learning_rate": 0.00015946331763042867,
+ "loss": 1.5665,
+ "step": 891
+ },
+ {
+ "epoch": 0.892250945578444,
+ "grad_norm": 0.6701356768608093,
+ "learning_rate": 0.00015937900631487686,
+ "loss": 1.3572,
+ "step": 892
+ },
+ {
+ "epoch": 0.8932512269075678,
+ "grad_norm": 0.6960388422012329,
+ "learning_rate": 0.00015929462975253585,
+ "loss": 1.5815,
+ "step": 893
+ },
+ {
+ "epoch": 0.8942515082366915,
+ "grad_norm": 0.6505674719810486,
+ "learning_rate": 0.00015921018803612014,
+ "loss": 1.7499,
+ "step": 894
+ },
+ {
+ "epoch": 0.8952517895658154,
+ "grad_norm": 0.604205310344696,
+ "learning_rate": 0.0001591256812584159,
+ "loss": 1.6838,
+ "step": 895
+ },
+ {
+ "epoch": 0.8962520708949392,
+ "grad_norm": 0.5875198841094971,
+ "learning_rate": 0.00015904110951228082,
+ "loss": 1.5147,
+ "step": 896
+ },
+ {
+ "epoch": 0.897252352224063,
+ "grad_norm": 0.6970433592796326,
+ "learning_rate": 0.00015895647289064396,
+ "loss": 1.7767,
+ "step": 897
+ },
+ {
+ "epoch": 0.8982526335531869,
+ "grad_norm": 0.7364515066146851,
+ "learning_rate": 0.00015887177148650564,
+ "loss": 1.6672,
+ "step": 898
+ },
+ {
+ "epoch": 0.8992529148823106,
+ "grad_norm": 0.7843589186668396,
+ "learning_rate": 0.0001587870053929374,
+ "loss": 1.689,
+ "step": 899
+ },
+ {
+ "epoch": 0.9002531962114345,
+ "grad_norm": 0.6405196189880371,
+ "learning_rate": 0.00015870217470308188,
+ "loss": 1.5917,
+ "step": 900
+ },
+ {
+ "epoch": 0.9012534775405583,
+ "grad_norm": 0.7019757628440857,
+ "learning_rate": 0.0001586172795101526,
+ "loss": 1.5497,
+ "step": 901
+ },
+ {
+ "epoch": 0.9022537588696821,
+ "grad_norm": 0.8048270344734192,
+ "learning_rate": 0.00015853231990743406,
+ "loss": 1.5821,
+ "step": 902
+ },
+ {
+ "epoch": 0.903254040198806,
+ "grad_norm": 0.6245777606964111,
+ "learning_rate": 0.0001584472959882815,
+ "loss": 1.5688,
+ "step": 903
+ },
+ {
+ "epoch": 0.9042543215279297,
+ "grad_norm": 0.6584132313728333,
+ "learning_rate": 0.00015836220784612085,
+ "loss": 1.4555,
+ "step": 904
+ },
+ {
+ "epoch": 0.9052546028570535,
+ "grad_norm": 0.7710773944854736,
+ "learning_rate": 0.00015827705557444852,
+ "loss": 1.6416,
+ "step": 905
+ },
+ {
+ "epoch": 0.9062548841861774,
+ "grad_norm": 0.6738126277923584,
+ "learning_rate": 0.00015819183926683153,
+ "loss": 1.6272,
+ "step": 906
+ },
+ {
+ "epoch": 0.9072551655153012,
+ "grad_norm": 0.6698735356330872,
+ "learning_rate": 0.00015810655901690715,
+ "loss": 1.4778,
+ "step": 907
+ },
+ {
+ "epoch": 0.9082554468444249,
+ "grad_norm": 1.0088928937911987,
+ "learning_rate": 0.00015802121491838297,
+ "loss": 1.6854,
+ "step": 908
+ },
+ {
+ "epoch": 0.9092557281735488,
+ "grad_norm": 0.6948708891868591,
+ "learning_rate": 0.0001579358070650367,
+ "loss": 1.5673,
+ "step": 909
+ },
+ {
+ "epoch": 0.9102560095026726,
+ "grad_norm": 0.6728948950767517,
+ "learning_rate": 0.00015785033555071616,
+ "loss": 1.6646,
+ "step": 910
+ },
+ {
+ "epoch": 0.9112562908317965,
+ "grad_norm": 0.8096952438354492,
+ "learning_rate": 0.00015776480046933905,
+ "loss": 1.4675,
+ "step": 911
+ },
+ {
+ "epoch": 0.9122565721609203,
+ "grad_norm": 0.6625403761863708,
+ "learning_rate": 0.000157679201914893,
+ "loss": 1.4793,
+ "step": 912
+ },
+ {
+ "epoch": 0.913256853490044,
+ "grad_norm": 0.7129424810409546,
+ "learning_rate": 0.00015759353998143528,
+ "loss": 1.574,
+ "step": 913
+ },
+ {
+ "epoch": 0.9142571348191679,
+ "grad_norm": 0.6151349544525146,
+ "learning_rate": 0.00015750781476309288,
+ "loss": 1.5631,
+ "step": 914
+ },
+ {
+ "epoch": 0.9152574161482917,
+ "grad_norm": 0.7185074687004089,
+ "learning_rate": 0.00015742202635406235,
+ "loss": 1.8382,
+ "step": 915
+ },
+ {
+ "epoch": 0.9162576974774155,
+ "grad_norm": 0.7076066732406616,
+ "learning_rate": 0.00015733617484860963,
+ "loss": 1.5394,
+ "step": 916
+ },
+ {
+ "epoch": 0.9172579788065394,
+ "grad_norm": 0.7286276817321777,
+ "learning_rate": 0.00015725026034106996,
+ "loss": 1.8139,
+ "step": 917
+ },
+ {
+ "epoch": 0.9182582601356631,
+ "grad_norm": 0.757075846195221,
+ "learning_rate": 0.00015716428292584787,
+ "loss": 1.6768,
+ "step": 918
+ },
+ {
+ "epoch": 0.919258541464787,
+ "grad_norm": 0.6926739811897278,
+ "learning_rate": 0.00015707824269741702,
+ "loss": 1.4541,
+ "step": 919
+ },
+ {
+ "epoch": 0.9202588227939108,
+ "grad_norm": 0.6489847898483276,
+ "learning_rate": 0.00015699213975031996,
+ "loss": 1.4725,
+ "step": 920
+ },
+ {
+ "epoch": 0.9212591041230346,
+ "grad_norm": 0.7668707966804504,
+ "learning_rate": 0.0001569059741791684,
+ "loss": 1.4239,
+ "step": 921
+ },
+ {
+ "epoch": 0.9222593854521585,
+ "grad_norm": 0.736863911151886,
+ "learning_rate": 0.0001568197460786426,
+ "loss": 1.6117,
+ "step": 922
+ },
+ {
+ "epoch": 0.9232596667812822,
+ "grad_norm": 0.8462884426116943,
+ "learning_rate": 0.0001567334555434917,
+ "loss": 1.5025,
+ "step": 923
+ },
+ {
+ "epoch": 0.924259948110406,
+ "grad_norm": 0.7481950521469116,
+ "learning_rate": 0.0001566471026685334,
+ "loss": 1.5024,
+ "step": 924
+ },
+ {
+ "epoch": 0.9252602294395299,
+ "grad_norm": 0.6457516551017761,
+ "learning_rate": 0.00015656068754865387,
+ "loss": 1.4526,
+ "step": 925
+ },
+ {
+ "epoch": 0.9262605107686537,
+ "grad_norm": 0.809140682220459,
+ "learning_rate": 0.00015647421027880772,
+ "loss": 1.4449,
+ "step": 926
+ },
+ {
+ "epoch": 0.9272607920977775,
+ "grad_norm": 0.6967790126800537,
+ "learning_rate": 0.0001563876709540178,
+ "loss": 1.5552,
+ "step": 927
+ },
+ {
+ "epoch": 0.9282610734269013,
+ "grad_norm": 0.6858595609664917,
+ "learning_rate": 0.0001563010696693752,
+ "loss": 1.6202,
+ "step": 928
+ },
+ {
+ "epoch": 0.9292613547560251,
+ "grad_norm": 0.7033559679985046,
+ "learning_rate": 0.00015621440652003907,
+ "loss": 1.7186,
+ "step": 929
+ },
+ {
+ "epoch": 0.930261636085149,
+ "grad_norm": 0.6527283787727356,
+ "learning_rate": 0.00015612768160123652,
+ "loss": 1.5028,
+ "step": 930
+ },
+ {
+ "epoch": 0.9312619174142728,
+ "grad_norm": 0.7243602275848389,
+ "learning_rate": 0.00015604089500826257,
+ "loss": 1.6729,
+ "step": 931
+ },
+ {
+ "epoch": 0.9322621987433966,
+ "grad_norm": 0.6734297275543213,
+ "learning_rate": 0.00015595404683648,
+ "loss": 1.4731,
+ "step": 932
+ },
+ {
+ "epoch": 0.9332624800725204,
+ "grad_norm": 0.7641247510910034,
+ "learning_rate": 0.00015586713718131922,
+ "loss": 1.5851,
+ "step": 933
+ },
+ {
+ "epoch": 0.9342627614016442,
+ "grad_norm": 0.7062788009643555,
+ "learning_rate": 0.0001557801661382782,
+ "loss": 1.5735,
+ "step": 934
+ },
+ {
+ "epoch": 0.935263042730768,
+ "grad_norm": 0.6413556337356567,
+ "learning_rate": 0.00015569313380292248,
+ "loss": 1.5854,
+ "step": 935
+ },
+ {
+ "epoch": 0.9362633240598919,
+ "grad_norm": 0.645720362663269,
+ "learning_rate": 0.00015560604027088477,
+ "loss": 1.5072,
+ "step": 936
+ },
+ {
+ "epoch": 0.9372636053890157,
+ "grad_norm": 0.6726225018501282,
+ "learning_rate": 0.00015551888563786515,
+ "loss": 1.587,
+ "step": 937
+ },
+ {
+ "epoch": 0.9382638867181394,
+ "grad_norm": 0.7043680548667908,
+ "learning_rate": 0.00015543166999963076,
+ "loss": 1.6577,
+ "step": 938
+ },
+ {
+ "epoch": 0.9392641680472633,
+ "grad_norm": 0.7049617767333984,
+ "learning_rate": 0.0001553443934520159,
+ "loss": 1.7624,
+ "step": 939
+ },
+ {
+ "epoch": 0.9402644493763871,
+ "grad_norm": 0.7060776352882385,
+ "learning_rate": 0.00015525705609092157,
+ "loss": 1.6208,
+ "step": 940
+ },
+ {
+ "epoch": 0.941264730705511,
+ "grad_norm": 0.6215025186538696,
+ "learning_rate": 0.00015516965801231586,
+ "loss": 1.4645,
+ "step": 941
+ },
+ {
+ "epoch": 0.9422650120346348,
+ "grad_norm": 0.7021099328994751,
+ "learning_rate": 0.0001550821993122334,
+ "loss": 1.566,
+ "step": 942
+ },
+ {
+ "epoch": 0.9432652933637585,
+ "grad_norm": 0.6451042294502258,
+ "learning_rate": 0.0001549946800867755,
+ "loss": 1.7491,
+ "step": 943
+ },
+ {
+ "epoch": 0.9442655746928824,
+ "grad_norm": 0.7288572192192078,
+ "learning_rate": 0.00015490710043210997,
+ "loss": 1.6302,
+ "step": 944
+ },
+ {
+ "epoch": 0.9452658560220062,
+ "grad_norm": 0.7850833535194397,
+ "learning_rate": 0.00015481946044447099,
+ "loss": 1.5673,
+ "step": 945
+ },
+ {
+ "epoch": 0.94626613735113,
+ "grad_norm": 0.7459181547164917,
+ "learning_rate": 0.00015473176022015906,
+ "loss": 1.4529,
+ "step": 946
+ },
+ {
+ "epoch": 0.9472664186802539,
+ "grad_norm": 0.7002627849578857,
+ "learning_rate": 0.0001546439998555409,
+ "loss": 1.8814,
+ "step": 947
+ },
+ {
+ "epoch": 0.9482667000093776,
+ "grad_norm": 0.6664572358131409,
+ "learning_rate": 0.0001545561794470492,
+ "loss": 1.5337,
+ "step": 948
+ },
+ {
+ "epoch": 0.9492669813385014,
+ "grad_norm": 0.757116973400116,
+ "learning_rate": 0.00015446829909118275,
+ "loss": 1.5775,
+ "step": 949
+ },
+ {
+ "epoch": 0.9502672626676253,
+ "grad_norm": 0.7456643581390381,
+ "learning_rate": 0.00015438035888450623,
+ "loss": 1.525,
+ "step": 950
+ },
+ {
+ "epoch": 0.9512675439967491,
+ "grad_norm": 0.6722500920295715,
+ "learning_rate": 0.00015429235892364994,
+ "loss": 1.5059,
+ "step": 951
+ },
+ {
+ "epoch": 0.952267825325873,
+ "grad_norm": 0.7431210279464722,
+ "learning_rate": 0.00015420429930530996,
+ "loss": 1.6867,
+ "step": 952
+ },
+ {
+ "epoch": 0.9532681066549967,
+ "grad_norm": 0.751015305519104,
+ "learning_rate": 0.00015411618012624786,
+ "loss": 1.7371,
+ "step": 953
+ },
+ {
+ "epoch": 0.9542683879841205,
+ "grad_norm": 0.807579517364502,
+ "learning_rate": 0.00015402800148329071,
+ "loss": 1.7353,
+ "step": 954
+ },
+ {
+ "epoch": 0.9552686693132444,
+ "grad_norm": 0.608161449432373,
+ "learning_rate": 0.00015393976347333088,
+ "loss": 1.3074,
+ "step": 955
+ },
+ {
+ "epoch": 0.9562689506423682,
+ "grad_norm": 0.7092815637588501,
+ "learning_rate": 0.00015385146619332596,
+ "loss": 1.676,
+ "step": 956
+ },
+ {
+ "epoch": 0.9572692319714919,
+ "grad_norm": 0.7639429569244385,
+ "learning_rate": 0.00015376310974029873,
+ "loss": 1.6452,
+ "step": 957
+ },
+ {
+ "epoch": 0.9582695133006158,
+ "grad_norm": 0.7333659529685974,
+ "learning_rate": 0.00015367469421133695,
+ "loss": 1.6821,
+ "step": 958
+ },
+ {
+ "epoch": 0.9592697946297396,
+ "grad_norm": 0.7246838212013245,
+ "learning_rate": 0.00015358621970359325,
+ "loss": 1.5078,
+ "step": 959
+ },
+ {
+ "epoch": 0.9602700759588634,
+ "grad_norm": 0.7209622859954834,
+ "learning_rate": 0.00015349768631428519,
+ "loss": 1.5617,
+ "step": 960
+ },
+ {
+ "epoch": 0.9612703572879873,
+ "grad_norm": 0.7034916877746582,
+ "learning_rate": 0.00015340909414069488,
+ "loss": 1.4711,
+ "step": 961
+ },
+ {
+ "epoch": 0.962270638617111,
+ "grad_norm": 0.7311360239982605,
+ "learning_rate": 0.00015332044328016914,
+ "loss": 1.6488,
+ "step": 962
+ },
+ {
+ "epoch": 0.9632709199462349,
+ "grad_norm": 0.6668992638587952,
+ "learning_rate": 0.0001532317338301192,
+ "loss": 1.6804,
+ "step": 963
+ },
+ {
+ "epoch": 0.9642712012753587,
+ "grad_norm": 0.6265329122543335,
+ "learning_rate": 0.00015314296588802076,
+ "loss": 1.8169,
+ "step": 964
+ },
+ {
+ "epoch": 0.9652714826044825,
+ "grad_norm": 0.6945448517799377,
+ "learning_rate": 0.00015305413955141365,
+ "loss": 1.8041,
+ "step": 965
+ },
+ {
+ "epoch": 0.9662717639336064,
+ "grad_norm": 0.6718643307685852,
+ "learning_rate": 0.00015296525491790205,
+ "loss": 1.3486,
+ "step": 966
+ },
+ {
+ "epoch": 0.9672720452627301,
+ "grad_norm": 0.6232700943946838,
+ "learning_rate": 0.00015287631208515406,
+ "loss": 1.5672,
+ "step": 967
+ },
+ {
+ "epoch": 0.9682723265918539,
+ "grad_norm": 0.7481172680854797,
+ "learning_rate": 0.00015278731115090171,
+ "loss": 1.5992,
+ "step": 968
+ },
+ {
+ "epoch": 0.9692726079209778,
+ "grad_norm": 0.6585466861724854,
+ "learning_rate": 0.00015269825221294098,
+ "loss": 1.6403,
+ "step": 969
+ },
+ {
+ "epoch": 0.9702728892501016,
+ "grad_norm": 0.7587956786155701,
+ "learning_rate": 0.00015260913536913154,
+ "loss": 1.7991,
+ "step": 970
+ },
+ {
+ "epoch": 0.9712731705792254,
+ "grad_norm": 0.672698974609375,
+ "learning_rate": 0.00015251996071739664,
+ "loss": 1.4311,
+ "step": 971
+ },
+ {
+ "epoch": 0.9722734519083492,
+ "grad_norm": 0.7597199082374573,
+ "learning_rate": 0.00015243072835572318,
+ "loss": 1.5692,
+ "step": 972
+ },
+ {
+ "epoch": 0.973273733237473,
+ "grad_norm": 0.7342745661735535,
+ "learning_rate": 0.0001523414383821613,
+ "loss": 1.6364,
+ "step": 973
+ },
+ {
+ "epoch": 0.9742740145665969,
+ "grad_norm": 0.6640815138816833,
+ "learning_rate": 0.00015225209089482462,
+ "loss": 1.5113,
+ "step": 974
+ },
+ {
+ "epoch": 0.9752742958957207,
+ "grad_norm": 0.6298378109931946,
+ "learning_rate": 0.0001521626859918898,
+ "loss": 1.4822,
+ "step": 975
+ },
+ {
+ "epoch": 0.9762745772248445,
+ "grad_norm": 0.6862055659294128,
+ "learning_rate": 0.00015207322377159668,
+ "loss": 1.6159,
+ "step": 976
+ },
+ {
+ "epoch": 0.9772748585539683,
+ "grad_norm": 0.6377236843109131,
+ "learning_rate": 0.00015198370433224805,
+ "loss": 1.6046,
+ "step": 977
+ },
+ {
+ "epoch": 0.9782751398830921,
+ "grad_norm": 0.620070219039917,
+ "learning_rate": 0.00015189412777220958,
+ "loss": 1.589,
+ "step": 978
+ },
+ {
+ "epoch": 0.9792754212122159,
+ "grad_norm": 0.7776119112968445,
+ "learning_rate": 0.00015180449418990976,
+ "loss": 1.485,
+ "step": 979
+ },
+ {
+ "epoch": 0.9802757025413398,
+ "grad_norm": 0.8258413076400757,
+ "learning_rate": 0.00015171480368383964,
+ "loss": 1.5615,
+ "step": 980
+ },
+ {
+ "epoch": 0.9812759838704636,
+ "grad_norm": 0.7297958135604858,
+ "learning_rate": 0.00015162505635255287,
+ "loss": 1.5408,
+ "step": 981
+ },
+ {
+ "epoch": 0.9822762651995874,
+ "grad_norm": 0.5848103165626526,
+ "learning_rate": 0.00015153525229466555,
+ "loss": 1.6821,
+ "step": 982
+ },
+ {
+ "epoch": 0.9832765465287112,
+ "grad_norm": 0.7375655174255371,
+ "learning_rate": 0.00015144539160885613,
+ "loss": 1.7568,
+ "step": 983
+ },
+ {
+ "epoch": 0.984276827857835,
+ "grad_norm": 0.7466885447502136,
+ "learning_rate": 0.00015135547439386516,
+ "loss": 1.5805,
+ "step": 984
+ },
+ {
+ "epoch": 0.9852771091869589,
+ "grad_norm": 0.6645593047142029,
+ "learning_rate": 0.0001512655007484955,
+ "loss": 1.6776,
+ "step": 985
+ },
+ {
+ "epoch": 0.9862773905160827,
+ "grad_norm": 0.7973874807357788,
+ "learning_rate": 0.00015117547077161185,
+ "loss": 1.4931,
+ "step": 986
+ },
+ {
+ "epoch": 0.9872776718452064,
+ "grad_norm": 0.685391902923584,
+ "learning_rate": 0.0001510853845621409,
+ "loss": 1.6254,
+ "step": 987
+ },
+ {
+ "epoch": 0.9882779531743303,
+ "grad_norm": 0.6562414765357971,
+ "learning_rate": 0.00015099524221907107,
+ "loss": 1.6677,
+ "step": 988
+ },
+ {
+ "epoch": 0.9892782345034541,
+ "grad_norm": 0.6216359734535217,
+ "learning_rate": 0.0001509050438414525,
+ "loss": 1.6107,
+ "step": 989
+ },
+ {
+ "epoch": 0.9902785158325779,
+ "grad_norm": 0.7108810544013977,
+ "learning_rate": 0.00015081478952839693,
+ "loss": 1.5268,
+ "step": 990
+ },
+ {
+ "epoch": 0.9912787971617018,
+ "grad_norm": 0.7076026797294617,
+ "learning_rate": 0.00015072447937907753,
+ "loss": 1.3716,
+ "step": 991
+ },
+ {
+ "epoch": 0.9922790784908255,
+ "grad_norm": 0.6056272983551025,
+ "learning_rate": 0.00015063411349272877,
+ "loss": 1.4931,
+ "step": 992
+ },
+ {
+ "epoch": 0.9932793598199494,
+ "grad_norm": 0.726671576499939,
+ "learning_rate": 0.00015054369196864644,
+ "loss": 1.6409,
+ "step": 993
+ },
+ {
+ "epoch": 0.9942796411490732,
+ "grad_norm": 0.7019214630126953,
+ "learning_rate": 0.00015045321490618748,
+ "loss": 1.4476,
+ "step": 994
+ },
+ {
+ "epoch": 0.995279922478197,
+ "grad_norm": 0.755043625831604,
+ "learning_rate": 0.00015036268240476978,
+ "loss": 1.6674,
+ "step": 995
+ },
+ {
+ "epoch": 0.9962802038073209,
+ "grad_norm": 0.7450313568115234,
+ "learning_rate": 0.00015027209456387218,
+ "loss": 1.3706,
+ "step": 996
+ },
+ {
+ "epoch": 0.9972804851364446,
+ "grad_norm": 0.6804680228233337,
+ "learning_rate": 0.00015018145148303438,
+ "loss": 1.3878,
+ "step": 997
+ },
+ {
+ "epoch": 0.9982807664655684,
+ "grad_norm": 0.7353954315185547,
+ "learning_rate": 0.00015009075326185667,
+ "loss": 1.8656,
+ "step": 998
+ },
+ {
+ "epoch": 0.9992810477946923,
+ "grad_norm": 0.7213340401649475,
+ "learning_rate": 0.00015000000000000001,
+ "loss": 1.6031,
+ "step": 999
+ },
+ {
+ "epoch": 1.0002813291238162,
+ "grad_norm": 0.7066403031349182,
+ "learning_rate": 0.00014990919179718584,
+ "loss": 1.3663,
+ "step": 1000
+ },
+ {
+ "epoch": 1.0012816104529398,
+ "grad_norm": 0.6104635000228882,
+ "learning_rate": 0.00014981832875319597,
+ "loss": 1.3155,
+ "step": 1001
+ },
+ {
+ "epoch": 1.0022818917820637,
+ "grad_norm": 0.7524546384811401,
+ "learning_rate": 0.00014972741096787242,
+ "loss": 1.2042,
+ "step": 1002
+ },
+ {
+ "epoch": 1.0032821731111876,
+ "grad_norm": 0.6831395626068115,
+ "learning_rate": 0.0001496364385411174,
+ "loss": 1.3909,
+ "step": 1003
+ },
+ {
+ "epoch": 1.0042824544403113,
+ "grad_norm": 0.6223152875900269,
+ "learning_rate": 0.0001495454115728932,
+ "loss": 1.2693,
+ "step": 1004
+ },
+ {
+ "epoch": 1.0052827357694352,
+ "grad_norm": 0.6630414128303528,
+ "learning_rate": 0.0001494543301632219,
+ "loss": 1.5871,
+ "step": 1005
+ },
+ {
+ "epoch": 1.006283017098559,
+ "grad_norm": 0.6211387515068054,
+ "learning_rate": 0.00014936319441218555,
+ "loss": 1.5096,
+ "step": 1006
+ },
+ {
+ "epoch": 1.0072832984276827,
+ "grad_norm": 0.7009375095367432,
+ "learning_rate": 0.0001492720044199259,
+ "loss": 1.4553,
+ "step": 1007
+ },
+ {
+ "epoch": 1.0082835797568066,
+ "grad_norm": 0.607667088508606,
+ "learning_rate": 0.0001491807602866442,
+ "loss": 1.4655,
+ "step": 1008
+ },
+ {
+ "epoch": 1.0092838610859305,
+ "grad_norm": 0.7168284058570862,
+ "learning_rate": 0.00014908946211260123,
+ "loss": 1.32,
+ "step": 1009
+ },
+ {
+ "epoch": 1.0102841424150542,
+ "grad_norm": 0.6472702622413635,
+ "learning_rate": 0.00014899810999811726,
+ "loss": 1.418,
+ "step": 1010
+ },
+ {
+ "epoch": 1.011284423744178,
+ "grad_norm": 0.6901958584785461,
+ "learning_rate": 0.0001489067040435717,
+ "loss": 1.5842,
+ "step": 1011
+ },
+ {
+ "epoch": 1.012284705073302,
+ "grad_norm": 0.6948314905166626,
+ "learning_rate": 0.00014881524434940313,
+ "loss": 1.3352,
+ "step": 1012
+ },
+ {
+ "epoch": 1.0132849864024256,
+ "grad_norm": 0.6064580082893372,
+ "learning_rate": 0.0001487237310161093,
+ "loss": 1.2467,
+ "step": 1013
+ },
+ {
+ "epoch": 1.0142852677315495,
+ "grad_norm": 0.5783251523971558,
+ "learning_rate": 0.0001486321641442467,
+ "loss": 1.3932,
+ "step": 1014
+ },
+ {
+ "epoch": 1.0152855490606734,
+ "grad_norm": 0.6915367245674133,
+ "learning_rate": 0.00014854054383443081,
+ "loss": 1.5062,
+ "step": 1015
+ },
+ {
+ "epoch": 1.016285830389797,
+ "grad_norm": 0.7143461108207703,
+ "learning_rate": 0.00014844887018733582,
+ "loss": 1.3284,
+ "step": 1016
+ },
+ {
+ "epoch": 1.017286111718921,
+ "grad_norm": 0.7030971050262451,
+ "learning_rate": 0.00014835714330369446,
+ "loss": 1.5919,
+ "step": 1017
+ },
+ {
+ "epoch": 1.0182863930480448,
+ "grad_norm": 0.7102513909339905,
+ "learning_rate": 0.00014826536328429795,
+ "loss": 1.4448,
+ "step": 1018
+ },
+ {
+ "epoch": 1.0192866743771685,
+ "grad_norm": 0.6152640581130981,
+ "learning_rate": 0.000148173530229996,
+ "loss": 1.4771,
+ "step": 1019
+ },
+ {
+ "epoch": 1.0202869557062924,
+ "grad_norm": 0.6302015781402588,
+ "learning_rate": 0.00014808164424169647,
+ "loss": 1.3969,
+ "step": 1020
+ },
+ {
+ "epoch": 1.0212872370354162,
+ "grad_norm": 0.8721572756767273,
+ "learning_rate": 0.0001479897054203655,
+ "loss": 1.3515,
+ "step": 1021
+ },
+ {
+ "epoch": 1.0222875183645401,
+ "grad_norm": 1.096592903137207,
+ "learning_rate": 0.00014789771386702717,
+ "loss": 1.4757,
+ "step": 1022
+ },
+ {
+ "epoch": 1.0232877996936638,
+ "grad_norm": 0.7684335112571716,
+ "learning_rate": 0.0001478056696827636,
+ "loss": 1.2521,
+ "step": 1023
+ },
+ {
+ "epoch": 1.0242880810227877,
+ "grad_norm": 0.6189197301864624,
+ "learning_rate": 0.0001477135729687147,
+ "loss": 1.4304,
+ "step": 1024
+ },
+ {
+ "epoch": 1.0252883623519116,
+ "grad_norm": 0.6061127781867981,
+ "learning_rate": 0.0001476214238260781,
+ "loss": 1.4236,
+ "step": 1025
+ },
+ {
+ "epoch": 1.0262886436810352,
+ "grad_norm": 0.5413788557052612,
+ "learning_rate": 0.000147529222356109,
+ "loss": 1.1392,
+ "step": 1026
+ },
+ {
+ "epoch": 1.0272889250101591,
+ "grad_norm": 0.6879326105117798,
+ "learning_rate": 0.0001474369686601202,
+ "loss": 1.3966,
+ "step": 1027
+ },
+ {
+ "epoch": 1.028289206339283,
+ "grad_norm": 0.817315936088562,
+ "learning_rate": 0.0001473446628394818,
+ "loss": 1.6747,
+ "step": 1028
+ },
+ {
+ "epoch": 1.0292894876684067,
+ "grad_norm": 0.7139183282852173,
+ "learning_rate": 0.00014725230499562119,
+ "loss": 1.5432,
+ "step": 1029
+ },
+ {
+ "epoch": 1.0302897689975306,
+ "grad_norm": 0.7536730766296387,
+ "learning_rate": 0.00014715989523002296,
+ "loss": 1.5839,
+ "step": 1030
+ },
+ {
+ "epoch": 1.0312900503266544,
+ "grad_norm": 0.7000136375427246,
+ "learning_rate": 0.00014706743364422878,
+ "loss": 1.3519,
+ "step": 1031
+ },
+ {
+ "epoch": 1.032290331655778,
+ "grad_norm": 0.6579506993293762,
+ "learning_rate": 0.00014697492033983707,
+ "loss": 1.3622,
+ "step": 1032
+ },
+ {
+ "epoch": 1.033290612984902,
+ "grad_norm": 0.6257238984107971,
+ "learning_rate": 0.00014688235541850337,
+ "loss": 1.4393,
+ "step": 1033
+ },
+ {
+ "epoch": 1.0342908943140259,
+ "grad_norm": 0.749273955821991,
+ "learning_rate": 0.0001467897389819397,
+ "loss": 1.5201,
+ "step": 1034
+ },
+ {
+ "epoch": 1.0352911756431495,
+ "grad_norm": 0.7008610963821411,
+ "learning_rate": 0.00014669707113191483,
+ "loss": 1.3041,
+ "step": 1035
+ },
+ {
+ "epoch": 1.0362914569722734,
+ "grad_norm": 0.6838043332099915,
+ "learning_rate": 0.0001466043519702539,
+ "loss": 1.435,
+ "step": 1036
+ },
+ {
+ "epoch": 1.0372917383013973,
+ "grad_norm": 0.6197534799575806,
+ "learning_rate": 0.00014651158159883855,
+ "loss": 1.3806,
+ "step": 1037
+ },
+ {
+ "epoch": 1.038292019630521,
+ "grad_norm": 0.6906173825263977,
+ "learning_rate": 0.0001464187601196066,
+ "loss": 1.3898,
+ "step": 1038
+ },
+ {
+ "epoch": 1.0392923009596449,
+ "grad_norm": 0.5627701282501221,
+ "learning_rate": 0.00014632588763455212,
+ "loss": 1.3949,
+ "step": 1039
+ },
+ {
+ "epoch": 1.0402925822887688,
+ "grad_norm": 0.6588866710662842,
+ "learning_rate": 0.00014623296424572517,
+ "loss": 1.4041,
+ "step": 1040
+ },
+ {
+ "epoch": 1.0412928636178926,
+ "grad_norm": 0.7941678762435913,
+ "learning_rate": 0.00014613999005523174,
+ "loss": 1.429,
+ "step": 1041
+ },
+ {
+ "epoch": 1.0422931449470163,
+ "grad_norm": 0.5834561586380005,
+ "learning_rate": 0.00014604696516523361,
+ "loss": 1.4007,
+ "step": 1042
+ },
+ {
+ "epoch": 1.0432934262761402,
+ "grad_norm": 0.5992164015769958,
+ "learning_rate": 0.00014595388967794835,
+ "loss": 1.4029,
+ "step": 1043
+ },
+ {
+ "epoch": 1.044293707605264,
+ "grad_norm": 0.6714745759963989,
+ "learning_rate": 0.00014586076369564908,
+ "loss": 1.4421,
+ "step": 1044
+ },
+ {
+ "epoch": 1.0452939889343877,
+ "grad_norm": 0.6675744652748108,
+ "learning_rate": 0.00014576758732066442,
+ "loss": 1.4663,
+ "step": 1045
+ },
+ {
+ "epoch": 1.0462942702635116,
+ "grad_norm": 0.6605483293533325,
+ "learning_rate": 0.00014567436065537835,
+ "loss": 1.3919,
+ "step": 1046
+ },
+ {
+ "epoch": 1.0472945515926355,
+ "grad_norm": 0.6836503744125366,
+ "learning_rate": 0.00014558108380223012,
+ "loss": 1.3428,
+ "step": 1047
+ },
+ {
+ "epoch": 1.0482948329217592,
+ "grad_norm": 0.6451092958450317,
+ "learning_rate": 0.00014548775686371412,
+ "loss": 1.3717,
+ "step": 1048
+ },
+ {
+ "epoch": 1.049295114250883,
+ "grad_norm": 0.6579246520996094,
+ "learning_rate": 0.00014539437994237977,
+ "loss": 1.7364,
+ "step": 1049
+ },
+ {
+ "epoch": 1.050295395580007,
+ "grad_norm": 0.625912070274353,
+ "learning_rate": 0.00014530095314083143,
+ "loss": 1.5574,
+ "step": 1050
+ },
+ {
+ "epoch": 1.0512956769091306,
+ "grad_norm": 0.7133544087409973,
+ "learning_rate": 0.00014520747656172824,
+ "loss": 1.6031,
+ "step": 1051
+ },
+ {
+ "epoch": 1.0522959582382545,
+ "grad_norm": 0.6956666111946106,
+ "learning_rate": 0.00014511395030778406,
+ "loss": 1.6075,
+ "step": 1052
+ },
+ {
+ "epoch": 1.0532962395673784,
+ "grad_norm": 0.7082141041755676,
+ "learning_rate": 0.00014502037448176734,
+ "loss": 1.3839,
+ "step": 1053
+ },
+ {
+ "epoch": 1.054296520896502,
+ "grad_norm": 0.696561872959137,
+ "learning_rate": 0.000144926749186501,
+ "loss": 1.6738,
+ "step": 1054
+ },
+ {
+ "epoch": 1.055296802225626,
+ "grad_norm": 0.6995558142662048,
+ "learning_rate": 0.00014483307452486227,
+ "loss": 1.4732,
+ "step": 1055
+ },
+ {
+ "epoch": 1.0562970835547498,
+ "grad_norm": 0.7434210181236267,
+ "learning_rate": 0.0001447393505997827,
+ "loss": 1.4207,
+ "step": 1056
+ },
+ {
+ "epoch": 1.0572973648838735,
+ "grad_norm": 0.6679419279098511,
+ "learning_rate": 0.00014464557751424793,
+ "loss": 1.397,
+ "step": 1057
+ },
+ {
+ "epoch": 1.0582976462129974,
+ "grad_norm": 0.6747702360153198,
+ "learning_rate": 0.00014455175537129758,
+ "loss": 1.5247,
+ "step": 1058
+ },
+ {
+ "epoch": 1.0592979275421213,
+ "grad_norm": 0.6184663772583008,
+ "learning_rate": 0.00014445788427402528,
+ "loss": 1.2086,
+ "step": 1059
+ },
+ {
+ "epoch": 1.0602982088712452,
+ "grad_norm": 0.6546644568443298,
+ "learning_rate": 0.00014436396432557835,
+ "loss": 1.3795,
+ "step": 1060
+ },
+ {
+ "epoch": 1.0612984902003688,
+ "grad_norm": 0.6418478488922119,
+ "learning_rate": 0.00014426999562915782,
+ "loss": 1.3997,
+ "step": 1061
+ },
+ {
+ "epoch": 1.0622987715294927,
+ "grad_norm": 0.6456977725028992,
+ "learning_rate": 0.00014417597828801832,
+ "loss": 1.347,
+ "step": 1062
+ },
+ {
+ "epoch": 1.0632990528586166,
+ "grad_norm": 0.7379586696624756,
+ "learning_rate": 0.0001440819124054679,
+ "loss": 1.4168,
+ "step": 1063
+ },
+ {
+ "epoch": 1.0642993341877403,
+ "grad_norm": 0.583483099937439,
+ "learning_rate": 0.00014398779808486793,
+ "loss": 1.3724,
+ "step": 1064
+ },
+ {
+ "epoch": 1.0652996155168641,
+ "grad_norm": 0.8881146311759949,
+ "learning_rate": 0.00014389363542963306,
+ "loss": 1.2834,
+ "step": 1065
+ },
+ {
+ "epoch": 1.066299896845988,
+ "grad_norm": 0.6458824276924133,
+ "learning_rate": 0.000143799424543231,
+ "loss": 1.2557,
+ "step": 1066
+ },
+ {
+ "epoch": 1.0673001781751117,
+ "grad_norm": 0.8149404525756836,
+ "learning_rate": 0.0001437051655291825,
+ "loss": 1.5179,
+ "step": 1067
+ },
+ {
+ "epoch": 1.0683004595042356,
+ "grad_norm": 0.8752502202987671,
+ "learning_rate": 0.0001436108584910611,
+ "loss": 1.3922,
+ "step": 1068
+ },
+ {
+ "epoch": 1.0693007408333595,
+ "grad_norm": 0.6741296648979187,
+ "learning_rate": 0.0001435165035324933,
+ "loss": 1.464,
+ "step": 1069
+ },
+ {
+ "epoch": 1.0703010221624831,
+ "grad_norm": 0.6555476784706116,
+ "learning_rate": 0.000143422100757158,
+ "loss": 1.3172,
+ "step": 1070
+ },
+ {
+ "epoch": 1.071301303491607,
+ "grad_norm": 0.660168468952179,
+ "learning_rate": 0.00014332765026878687,
+ "loss": 1.3089,
+ "step": 1071
+ },
+ {
+ "epoch": 1.072301584820731,
+ "grad_norm": 0.8213777542114258,
+ "learning_rate": 0.0001432331521711639,
+ "loss": 1.4487,
+ "step": 1072
+ },
+ {
+ "epoch": 1.0733018661498546,
+ "grad_norm": 0.6642137765884399,
+ "learning_rate": 0.00014313860656812536,
+ "loss": 1.1624,
+ "step": 1073
+ },
+ {
+ "epoch": 1.0743021474789785,
+ "grad_norm": 0.6304247975349426,
+ "learning_rate": 0.00014304401356355983,
+ "loss": 1.2725,
+ "step": 1074
+ },
+ {
+ "epoch": 1.0753024288081023,
+ "grad_norm": 0.6976219415664673,
+ "learning_rate": 0.00014294937326140788,
+ "loss": 1.6664,
+ "step": 1075
+ },
+ {
+ "epoch": 1.076302710137226,
+ "grad_norm": 0.6528605818748474,
+ "learning_rate": 0.00014285468576566207,
+ "loss": 1.3489,
+ "step": 1076
+ },
+ {
+ "epoch": 1.07730299146635,
+ "grad_norm": 0.7203120589256287,
+ "learning_rate": 0.00014275995118036693,
+ "loss": 1.4319,
+ "step": 1077
+ },
+ {
+ "epoch": 1.0783032727954738,
+ "grad_norm": 0.8259358406066895,
+ "learning_rate": 0.00014266516960961852,
+ "loss": 1.3707,
+ "step": 1078
+ },
+ {
+ "epoch": 1.0793035541245974,
+ "grad_norm": 0.9485010504722595,
+ "learning_rate": 0.00014257034115756472,
+ "loss": 1.6787,
+ "step": 1079
+ },
+ {
+ "epoch": 1.0803038354537213,
+ "grad_norm": 0.6732786893844604,
+ "learning_rate": 0.0001424754659284048,
+ "loss": 1.4184,
+ "step": 1080
+ },
+ {
+ "epoch": 1.0813041167828452,
+ "grad_norm": 0.6673377752304077,
+ "learning_rate": 0.0001423805440263895,
+ "loss": 1.5084,
+ "step": 1081
+ },
+ {
+ "epoch": 1.0823043981119689,
+ "grad_norm": 0.6682411432266235,
+ "learning_rate": 0.0001422855755558208,
+ "loss": 1.4034,
+ "step": 1082
+ },
+ {
+ "epoch": 1.0833046794410928,
+ "grad_norm": 0.6940018534660339,
+ "learning_rate": 0.00014219056062105193,
+ "loss": 1.6816,
+ "step": 1083
+ },
+ {
+ "epoch": 1.0843049607702167,
+ "grad_norm": 0.7052391767501831,
+ "learning_rate": 0.0001420954993264871,
+ "loss": 1.5849,
+ "step": 1084
+ },
+ {
+ "epoch": 1.0853052420993405,
+ "grad_norm": 0.7090102434158325,
+ "learning_rate": 0.00014200039177658145,
+ "loss": 1.2906,
+ "step": 1085
+ },
+ {
+ "epoch": 1.0863055234284642,
+ "grad_norm": 0.7664905190467834,
+ "learning_rate": 0.000141905238075841,
+ "loss": 1.5504,
+ "step": 1086
+ },
+ {
+ "epoch": 1.087305804757588,
+ "grad_norm": 0.6557911038398743,
+ "learning_rate": 0.00014181003832882248,
+ "loss": 1.5846,
+ "step": 1087
+ },
+ {
+ "epoch": 1.088306086086712,
+ "grad_norm": 0.6342834234237671,
+ "learning_rate": 0.00014171479264013311,
+ "loss": 1.2405,
+ "step": 1088
+ },
+ {
+ "epoch": 1.0893063674158356,
+ "grad_norm": 0.7152488827705383,
+ "learning_rate": 0.00014161950111443077,
+ "loss": 1.5047,
+ "step": 1089
+ },
+ {
+ "epoch": 1.0903066487449595,
+ "grad_norm": 0.6031161546707153,
+ "learning_rate": 0.00014152416385642357,
+ "loss": 1.3203,
+ "step": 1090
+ },
+ {
+ "epoch": 1.0913069300740834,
+ "grad_norm": 0.6475042700767517,
+ "learning_rate": 0.00014142878097086995,
+ "loss": 1.191,
+ "step": 1091
+ },
+ {
+ "epoch": 1.092307211403207,
+ "grad_norm": 0.7956790924072266,
+ "learning_rate": 0.0001413333525625784,
+ "loss": 1.3601,
+ "step": 1092
+ },
+ {
+ "epoch": 1.093307492732331,
+ "grad_norm": 0.6703265309333801,
+ "learning_rate": 0.00014123787873640754,
+ "loss": 1.374,
+ "step": 1093
+ },
+ {
+ "epoch": 1.0943077740614549,
+ "grad_norm": 0.7583750486373901,
+ "learning_rate": 0.00014114235959726575,
+ "loss": 1.3064,
+ "step": 1094
+ },
+ {
+ "epoch": 1.0953080553905785,
+ "grad_norm": 0.6749271154403687,
+ "learning_rate": 0.0001410467952501114,
+ "loss": 1.4501,
+ "step": 1095
+ },
+ {
+ "epoch": 1.0963083367197024,
+ "grad_norm": 0.6708521842956543,
+ "learning_rate": 0.00014095118579995235,
+ "loss": 1.5046,
+ "step": 1096
+ },
+ {
+ "epoch": 1.0973086180488263,
+ "grad_norm": 0.5871726870536804,
+ "learning_rate": 0.0001408555313518461,
+ "loss": 1.3549,
+ "step": 1097
+ },
+ {
+ "epoch": 1.09830889937795,
+ "grad_norm": 0.6886669397354126,
+ "learning_rate": 0.00014075983201089964,
+ "loss": 1.3131,
+ "step": 1098
+ },
+ {
+ "epoch": 1.0993091807070738,
+ "grad_norm": 0.6243886351585388,
+ "learning_rate": 0.0001406640878822692,
+ "loss": 1.2278,
+ "step": 1099
+ },
+ {
+ "epoch": 1.1003094620361977,
+ "grad_norm": 0.7198624610900879,
+ "learning_rate": 0.00014056829907116024,
+ "loss": 1.4459,
+ "step": 1100
+ },
+ {
+ "epoch": 1.1013097433653214,
+ "grad_norm": 0.8059262037277222,
+ "learning_rate": 0.00014047246568282736,
+ "loss": 1.473,
+ "step": 1101
+ },
+ {
+ "epoch": 1.1023100246944453,
+ "grad_norm": 0.8409417271614075,
+ "learning_rate": 0.00014037658782257414,
+ "loss": 1.6688,
+ "step": 1102
+ },
+ {
+ "epoch": 1.1033103060235692,
+ "grad_norm": 0.739276111125946,
+ "learning_rate": 0.00014028066559575302,
+ "loss": 1.6182,
+ "step": 1103
+ },
+ {
+ "epoch": 1.104310587352693,
+ "grad_norm": 0.6282714009284973,
+ "learning_rate": 0.00014018469910776513,
+ "loss": 1.2835,
+ "step": 1104
+ },
+ {
+ "epoch": 1.1053108686818167,
+ "grad_norm": 0.7133497595787048,
+ "learning_rate": 0.0001400886884640603,
+ "loss": 1.5798,
+ "step": 1105
+ },
+ {
+ "epoch": 1.1063111500109406,
+ "grad_norm": 0.6376346945762634,
+ "learning_rate": 0.00013999263377013693,
+ "loss": 1.1436,
+ "step": 1106
+ },
+ {
+ "epoch": 1.1073114313400645,
+ "grad_norm": 0.5934734344482422,
+ "learning_rate": 0.00013989653513154165,
+ "loss": 1.3204,
+ "step": 1107
+ },
+ {
+ "epoch": 1.1083117126691882,
+ "grad_norm": 0.6655352115631104,
+ "learning_rate": 0.00013980039265386955,
+ "loss": 1.4602,
+ "step": 1108
+ },
+ {
+ "epoch": 1.109311993998312,
+ "grad_norm": 0.7147901058197021,
+ "learning_rate": 0.00013970420644276383,
+ "loss": 1.4124,
+ "step": 1109
+ },
+ {
+ "epoch": 1.110312275327436,
+ "grad_norm": 0.7845139503479004,
+ "learning_rate": 0.0001396079766039157,
+ "loss": 1.5831,
+ "step": 1110
+ },
+ {
+ "epoch": 1.1113125566565596,
+ "grad_norm": 0.8100587129592896,
+ "learning_rate": 0.00013951170324306435,
+ "loss": 1.6218,
+ "step": 1111
+ },
+ {
+ "epoch": 1.1123128379856835,
+ "grad_norm": 0.689988374710083,
+ "learning_rate": 0.00013941538646599687,
+ "loss": 1.2396,
+ "step": 1112
+ },
+ {
+ "epoch": 1.1133131193148074,
+ "grad_norm": 0.6771540641784668,
+ "learning_rate": 0.0001393190263785479,
+ "loss": 1.3739,
+ "step": 1113
+ },
+ {
+ "epoch": 1.114313400643931,
+ "grad_norm": 0.6424306631088257,
+ "learning_rate": 0.0001392226230865998,
+ "loss": 1.1653,
+ "step": 1114
+ },
+ {
+ "epoch": 1.115313681973055,
+ "grad_norm": 0.6135202646255493,
+ "learning_rate": 0.0001391261766960823,
+ "loss": 1.1924,
+ "step": 1115
+ },
+ {
+ "epoch": 1.1163139633021788,
+ "grad_norm": 0.6751917004585266,
+ "learning_rate": 0.00013902968731297255,
+ "loss": 1.4491,
+ "step": 1116
+ },
+ {
+ "epoch": 1.1173142446313025,
+ "grad_norm": 0.875303328037262,
+ "learning_rate": 0.00013893315504329498,
+ "loss": 1.3918,
+ "step": 1117
+ },
+ {
+ "epoch": 1.1183145259604264,
+ "grad_norm": 0.7102020978927612,
+ "learning_rate": 0.00013883657999312109,
+ "loss": 1.463,
+ "step": 1118
+ },
+ {
+ "epoch": 1.1193148072895502,
+ "grad_norm": 0.6863378882408142,
+ "learning_rate": 0.00013873996226856933,
+ "loss": 1.3958,
+ "step": 1119
+ },
+ {
+ "epoch": 1.120315088618674,
+ "grad_norm": 0.6769587397575378,
+ "learning_rate": 0.00013864330197580513,
+ "loss": 1.3044,
+ "step": 1120
+ },
+ {
+ "epoch": 1.1213153699477978,
+ "grad_norm": 0.7217769026756287,
+ "learning_rate": 0.0001385465992210407,
+ "loss": 1.6125,
+ "step": 1121
+ },
+ {
+ "epoch": 1.1223156512769217,
+ "grad_norm": 0.6756213903427124,
+ "learning_rate": 0.00013844985411053492,
+ "loss": 1.3658,
+ "step": 1122
+ },
+ {
+ "epoch": 1.1233159326060456,
+ "grad_norm": 0.7109145522117615,
+ "learning_rate": 0.00013835306675059308,
+ "loss": 1.5698,
+ "step": 1123
+ },
+ {
+ "epoch": 1.1243162139351692,
+ "grad_norm": 0.5903546810150146,
+ "learning_rate": 0.00013825623724756704,
+ "loss": 1.4429,
+ "step": 1124
+ },
+ {
+ "epoch": 1.1253164952642931,
+ "grad_norm": 0.7500163912773132,
+ "learning_rate": 0.00013815936570785487,
+ "loss": 1.2482,
+ "step": 1125
+ },
+ {
+ "epoch": 1.1263167765934168,
+ "grad_norm": 0.6458998918533325,
+ "learning_rate": 0.00013806245223790088,
+ "loss": 1.3496,
+ "step": 1126
+ },
+ {
+ "epoch": 1.1273170579225407,
+ "grad_norm": 0.627657413482666,
+ "learning_rate": 0.0001379654969441955,
+ "loss": 1.4847,
+ "step": 1127
+ },
+ {
+ "epoch": 1.1283173392516646,
+ "grad_norm": 0.7440046072006226,
+ "learning_rate": 0.000137868499933275,
+ "loss": 1.782,
+ "step": 1128
+ },
+ {
+ "epoch": 1.1293176205807884,
+ "grad_norm": 0.6717308163642883,
+ "learning_rate": 0.00013777146131172162,
+ "loss": 1.6345,
+ "step": 1129
+ },
+ {
+ "epoch": 1.130317901909912,
+ "grad_norm": 0.6480956673622131,
+ "learning_rate": 0.00013767438118616318,
+ "loss": 1.2862,
+ "step": 1130
+ },
+ {
+ "epoch": 1.131318183239036,
+ "grad_norm": 0.6778338551521301,
+ "learning_rate": 0.00013757725966327322,
+ "loss": 1.4821,
+ "step": 1131
+ },
+ {
+ "epoch": 1.1323184645681599,
+ "grad_norm": 0.6759636402130127,
+ "learning_rate": 0.00013748009684977073,
+ "loss": 1.5988,
+ "step": 1132
+ },
+ {
+ "epoch": 1.1333187458972835,
+ "grad_norm": 0.674404501914978,
+ "learning_rate": 0.0001373828928524201,
+ "loss": 1.4744,
+ "step": 1133
+ },
+ {
+ "epoch": 1.1343190272264074,
+ "grad_norm": 0.6017488241195679,
+ "learning_rate": 0.00013728564777803088,
+ "loss": 1.6296,
+ "step": 1134
+ },
+ {
+ "epoch": 1.1353193085555313,
+ "grad_norm": 0.6459933519363403,
+ "learning_rate": 0.00013718836173345783,
+ "loss": 1.5347,
+ "step": 1135
+ },
+ {
+ "epoch": 1.136319589884655,
+ "grad_norm": 0.6578681468963623,
+ "learning_rate": 0.00013709103482560078,
+ "loss": 1.4101,
+ "step": 1136
+ },
+ {
+ "epoch": 1.1373198712137789,
+ "grad_norm": 0.5906695127487183,
+ "learning_rate": 0.00013699366716140435,
+ "loss": 1.422,
+ "step": 1137
+ },
+ {
+ "epoch": 1.1383201525429028,
+ "grad_norm": 0.5622004866600037,
+ "learning_rate": 0.00013689625884785798,
+ "loss": 1.2805,
+ "step": 1138
+ },
+ {
+ "epoch": 1.1393204338720264,
+ "grad_norm": 0.7057269811630249,
+ "learning_rate": 0.00013679880999199583,
+ "loss": 1.5009,
+ "step": 1139
+ },
+ {
+ "epoch": 1.1403207152011503,
+ "grad_norm": 0.6655155420303345,
+ "learning_rate": 0.00013670132070089653,
+ "loss": 1.3504,
+ "step": 1140
+ },
+ {
+ "epoch": 1.1413209965302742,
+ "grad_norm": 0.6448667645454407,
+ "learning_rate": 0.00013660379108168324,
+ "loss": 1.4345,
+ "step": 1141
+ },
+ {
+ "epoch": 1.142321277859398,
+ "grad_norm": 0.6696295142173767,
+ "learning_rate": 0.00013650622124152334,
+ "loss": 1.3248,
+ "step": 1142
+ },
+ {
+ "epoch": 1.1433215591885217,
+ "grad_norm": 0.8913035988807678,
+ "learning_rate": 0.0001364086112876284,
+ "loss": 1.3148,
+ "step": 1143
+ },
+ {
+ "epoch": 1.1443218405176456,
+ "grad_norm": 0.6853944659233093,
+ "learning_rate": 0.00013631096132725413,
+ "loss": 1.4361,
+ "step": 1144
+ },
+ {
+ "epoch": 1.1453221218467693,
+ "grad_norm": 0.6286287307739258,
+ "learning_rate": 0.00013621327146770025,
+ "loss": 1.4485,
+ "step": 1145
+ },
+ {
+ "epoch": 1.1463224031758932,
+ "grad_norm": 0.6847277283668518,
+ "learning_rate": 0.00013611554181631013,
+ "loss": 1.4095,
+ "step": 1146
+ },
+ {
+ "epoch": 1.147322684505017,
+ "grad_norm": 0.6514857411384583,
+ "learning_rate": 0.00013601777248047105,
+ "loss": 1.4106,
+ "step": 1147
+ },
+ {
+ "epoch": 1.148322965834141,
+ "grad_norm": 0.6113057732582092,
+ "learning_rate": 0.0001359199635676138,
+ "loss": 1.3483,
+ "step": 1148
+ },
+ {
+ "epoch": 1.1493232471632646,
+ "grad_norm": 0.6366062164306641,
+ "learning_rate": 0.00013582211518521273,
+ "loss": 1.4164,
+ "step": 1149
+ },
+ {
+ "epoch": 1.1503235284923885,
+ "grad_norm": 0.6680336594581604,
+ "learning_rate": 0.00013572422744078551,
+ "loss": 1.5326,
+ "step": 1150
+ },
+ {
+ "epoch": 1.1513238098215124,
+ "grad_norm": 0.6046566367149353,
+ "learning_rate": 0.00013562630044189304,
+ "loss": 1.4139,
+ "step": 1151
+ },
+ {
+ "epoch": 1.152324091150636,
+ "grad_norm": 0.5869380235671997,
+ "learning_rate": 0.00013552833429613938,
+ "loss": 1.2859,
+ "step": 1152
+ },
+ {
+ "epoch": 1.15332437247976,
+ "grad_norm": 0.6923080682754517,
+ "learning_rate": 0.0001354303291111716,
+ "loss": 1.3419,
+ "step": 1153
+ },
+ {
+ "epoch": 1.1543246538088838,
+ "grad_norm": 0.698279082775116,
+ "learning_rate": 0.0001353322849946797,
+ "loss": 1.4796,
+ "step": 1154
+ },
+ {
+ "epoch": 1.1553249351380075,
+ "grad_norm": 0.6980450749397278,
+ "learning_rate": 0.00013523420205439646,
+ "loss": 1.5941,
+ "step": 1155
+ },
+ {
+ "epoch": 1.1563252164671314,
+ "grad_norm": 0.7222338914871216,
+ "learning_rate": 0.0001351360803980972,
+ "loss": 1.5019,
+ "step": 1156
+ },
+ {
+ "epoch": 1.1573254977962553,
+ "grad_norm": 0.6446929574012756,
+ "learning_rate": 0.00013503792013359997,
+ "loss": 1.2706,
+ "step": 1157
+ },
+ {
+ "epoch": 1.158325779125379,
+ "grad_norm": 0.699488639831543,
+ "learning_rate": 0.00013493972136876509,
+ "loss": 1.5775,
+ "step": 1158
+ },
+ {
+ "epoch": 1.1593260604545028,
+ "grad_norm": 0.6865110397338867,
+ "learning_rate": 0.00013484148421149527,
+ "loss": 1.5015,
+ "step": 1159
+ },
+ {
+ "epoch": 1.1603263417836267,
+ "grad_norm": 0.800570547580719,
+ "learning_rate": 0.0001347432087697354,
+ "loss": 1.401,
+ "step": 1160
+ },
+ {
+ "epoch": 1.1613266231127504,
+ "grad_norm": 0.706388533115387,
+ "learning_rate": 0.00013464489515147238,
+ "loss": 1.2895,
+ "step": 1161
+ },
+ {
+ "epoch": 1.1623269044418743,
+ "grad_norm": 0.7967466711997986,
+ "learning_rate": 0.0001345465434647351,
+ "loss": 1.848,
+ "step": 1162
+ },
+ {
+ "epoch": 1.1633271857709981,
+ "grad_norm": 0.7130827903747559,
+ "learning_rate": 0.00013444815381759425,
+ "loss": 1.2899,
+ "step": 1163
+ },
+ {
+ "epoch": 1.1643274671001218,
+ "grad_norm": 0.6437693238258362,
+ "learning_rate": 0.00013434972631816235,
+ "loss": 1.4456,
+ "step": 1164
+ },
+ {
+ "epoch": 1.1653277484292457,
+ "grad_norm": 0.6305271983146667,
+ "learning_rate": 0.0001342512610745933,
+ "loss": 1.3375,
+ "step": 1165
+ },
+ {
+ "epoch": 1.1663280297583696,
+ "grad_norm": 0.6622384786605835,
+ "learning_rate": 0.0001341527581950827,
+ "loss": 1.6379,
+ "step": 1166
+ },
+ {
+ "epoch": 1.1673283110874935,
+ "grad_norm": 0.64511638879776,
+ "learning_rate": 0.00013405421778786737,
+ "loss": 1.2818,
+ "step": 1167
+ },
+ {
+ "epoch": 1.1683285924166171,
+ "grad_norm": 0.6575477719306946,
+ "learning_rate": 0.00013395563996122537,
+ "loss": 1.2264,
+ "step": 1168
+ },
+ {
+ "epoch": 1.169328873745741,
+ "grad_norm": 0.787896990776062,
+ "learning_rate": 0.00013385702482347593,
+ "loss": 1.6519,
+ "step": 1169
+ },
+ {
+ "epoch": 1.170329155074865,
+ "grad_norm": 0.7512592077255249,
+ "learning_rate": 0.00013375837248297926,
+ "loss": 1.4776,
+ "step": 1170
+ },
+ {
+ "epoch": 1.1713294364039886,
+ "grad_norm": 0.9541054964065552,
+ "learning_rate": 0.0001336596830481364,
+ "loss": 1.5383,
+ "step": 1171
+ },
+ {
+ "epoch": 1.1723297177331125,
+ "grad_norm": 0.7181218266487122,
+ "learning_rate": 0.0001335609566273892,
+ "loss": 1.4088,
+ "step": 1172
+ },
+ {
+ "epoch": 1.1733299990622363,
+ "grad_norm": 0.7449761629104614,
+ "learning_rate": 0.00013346219332922016,
+ "loss": 1.2313,
+ "step": 1173
+ },
+ {
+ "epoch": 1.17433028039136,
+ "grad_norm": 0.7783718705177307,
+ "learning_rate": 0.00013336339326215228,
+ "loss": 1.4114,
+ "step": 1174
+ },
+ {
+ "epoch": 1.175330561720484,
+ "grad_norm": 0.8479213714599609,
+ "learning_rate": 0.00013326455653474897,
+ "loss": 1.6336,
+ "step": 1175
+ },
+ {
+ "epoch": 1.1763308430496078,
+ "grad_norm": 0.7903116941452026,
+ "learning_rate": 0.00013316568325561393,
+ "loss": 1.3251,
+ "step": 1176
+ },
+ {
+ "epoch": 1.1773311243787314,
+ "grad_norm": 0.7016749978065491,
+ "learning_rate": 0.00013306677353339098,
+ "loss": 1.5013,
+ "step": 1177
+ },
+ {
+ "epoch": 1.1783314057078553,
+ "grad_norm": 0.6183115839958191,
+ "learning_rate": 0.000132967827476764,
+ "loss": 1.3984,
+ "step": 1178
+ },
+ {
+ "epoch": 1.1793316870369792,
+ "grad_norm": 0.6227801442146301,
+ "learning_rate": 0.0001328688451944569,
+ "loss": 1.2306,
+ "step": 1179
+ },
+ {
+ "epoch": 1.1803319683661029,
+ "grad_norm": 0.7611119151115417,
+ "learning_rate": 0.00013276982679523322,
+ "loss": 1.3648,
+ "step": 1180
+ },
+ {
+ "epoch": 1.1813322496952268,
+ "grad_norm": 0.6929368376731873,
+ "learning_rate": 0.00013267077238789633,
+ "loss": 1.5107,
+ "step": 1181
+ },
+ {
+ "epoch": 1.1823325310243507,
+ "grad_norm": 0.6637099385261536,
+ "learning_rate": 0.00013257168208128908,
+ "loss": 1.5103,
+ "step": 1182
+ },
+ {
+ "epoch": 1.1833328123534743,
+ "grad_norm": 0.6320933103561401,
+ "learning_rate": 0.00013247255598429378,
+ "loss": 1.4024,
+ "step": 1183
+ },
+ {
+ "epoch": 1.1843330936825982,
+ "grad_norm": 0.8260888457298279,
+ "learning_rate": 0.00013237339420583212,
+ "loss": 1.4385,
+ "step": 1184
+ },
+ {
+ "epoch": 1.185333375011722,
+ "grad_norm": 0.7872930765151978,
+ "learning_rate": 0.00013227419685486492,
+ "loss": 1.2566,
+ "step": 1185
+ },
+ {
+ "epoch": 1.186333656340846,
+ "grad_norm": 0.6857215762138367,
+ "learning_rate": 0.00013217496404039218,
+ "loss": 1.619,
+ "step": 1186
+ },
+ {
+ "epoch": 1.1873339376699696,
+ "grad_norm": 0.708858072757721,
+ "learning_rate": 0.0001320756958714528,
+ "loss": 1.2228,
+ "step": 1187
+ },
+ {
+ "epoch": 1.1883342189990935,
+ "grad_norm": 0.6442694067955017,
+ "learning_rate": 0.00013197639245712454,
+ "loss": 1.5542,
+ "step": 1188
+ },
+ {
+ "epoch": 1.1893345003282172,
+ "grad_norm": 0.5615749359130859,
+ "learning_rate": 0.00013187705390652388,
+ "loss": 1.5289,
+ "step": 1189
+ },
+ {
+ "epoch": 1.190334781657341,
+ "grad_norm": 0.6250069737434387,
+ "learning_rate": 0.00013177768032880593,
+ "loss": 1.3849,
+ "step": 1190
+ },
+ {
+ "epoch": 1.191335062986465,
+ "grad_norm": 0.6521658301353455,
+ "learning_rate": 0.0001316782718331643,
+ "loss": 1.4118,
+ "step": 1191
+ },
+ {
+ "epoch": 1.1923353443155889,
+ "grad_norm": 0.7188206315040588,
+ "learning_rate": 0.0001315788285288309,
+ "loss": 1.4436,
+ "step": 1192
+ },
+ {
+ "epoch": 1.1933356256447125,
+ "grad_norm": 0.7098423838615417,
+ "learning_rate": 0.00013147935052507597,
+ "loss": 1.3339,
+ "step": 1193
+ },
+ {
+ "epoch": 1.1943359069738364,
+ "grad_norm": 0.655750036239624,
+ "learning_rate": 0.00013137983793120786,
+ "loss": 1.4208,
+ "step": 1194
+ },
+ {
+ "epoch": 1.1953361883029603,
+ "grad_norm": 0.640650749206543,
+ "learning_rate": 0.0001312802908565729,
+ "loss": 1.7209,
+ "step": 1195
+ },
+ {
+ "epoch": 1.196336469632084,
+ "grad_norm": 0.601091206073761,
+ "learning_rate": 0.0001311807094105553,
+ "loss": 1.4339,
+ "step": 1196
+ },
+ {
+ "epoch": 1.1973367509612078,
+ "grad_norm": 0.5894292593002319,
+ "learning_rate": 0.00013108109370257712,
+ "loss": 1.4687,
+ "step": 1197
+ },
+ {
+ "epoch": 1.1983370322903317,
+ "grad_norm": 0.7012053728103638,
+ "learning_rate": 0.00013098144384209796,
+ "loss": 1.5834,
+ "step": 1198
+ },
+ {
+ "epoch": 1.1993373136194554,
+ "grad_norm": 0.636356770992279,
+ "learning_rate": 0.000130881759938615,
+ "loss": 1.1723,
+ "step": 1199
+ },
+ {
+ "epoch": 1.2003375949485793,
+ "grad_norm": 0.6913763284683228,
+ "learning_rate": 0.00013078204210166278,
+ "loss": 1.3327,
+ "step": 1200
+ },
+ {
+ "epoch": 1.2013378762777032,
+ "grad_norm": 0.7067025303840637,
+ "learning_rate": 0.00013068229044081324,
+ "loss": 1.4671,
+ "step": 1201
+ },
+ {
+ "epoch": 1.2023381576068268,
+ "grad_norm": 0.698302149772644,
+ "learning_rate": 0.0001305825050656754,
+ "loss": 1.601,
+ "step": 1202
+ },
+ {
+ "epoch": 1.2033384389359507,
+ "grad_norm": 0.6283687949180603,
+ "learning_rate": 0.00013048268608589533,
+ "loss": 1.4164,
+ "step": 1203
+ },
+ {
+ "epoch": 1.2043387202650746,
+ "grad_norm": 0.6440062522888184,
+ "learning_rate": 0.00013038283361115603,
+ "loss": 1.3725,
+ "step": 1204
+ },
+ {
+ "epoch": 1.2053390015941985,
+ "grad_norm": 0.726294994354248,
+ "learning_rate": 0.0001302829477511773,
+ "loss": 1.4557,
+ "step": 1205
+ },
+ {
+ "epoch": 1.2063392829233222,
+ "grad_norm": 0.5984981060028076,
+ "learning_rate": 0.0001301830286157157,
+ "loss": 1.3455,
+ "step": 1206
+ },
+ {
+ "epoch": 1.207339564252446,
+ "grad_norm": 0.6096123456954956,
+ "learning_rate": 0.0001300830763145642,
+ "loss": 1.3562,
+ "step": 1207
+ },
+ {
+ "epoch": 1.2083398455815697,
+ "grad_norm": 0.815022349357605,
+ "learning_rate": 0.00012998309095755235,
+ "loss": 1.5287,
+ "step": 1208
+ },
+ {
+ "epoch": 1.2093401269106936,
+ "grad_norm": 0.990715742111206,
+ "learning_rate": 0.00012988307265454597,
+ "loss": 1.4186,
+ "step": 1209
+ },
+ {
+ "epoch": 1.2103404082398175,
+ "grad_norm": 0.8175992369651794,
+ "learning_rate": 0.0001297830215154471,
+ "loss": 1.3145,
+ "step": 1210
+ },
+ {
+ "epoch": 1.2113406895689414,
+ "grad_norm": 0.734703779220581,
+ "learning_rate": 0.00012968293765019384,
+ "loss": 1.359,
+ "step": 1211
+ },
+ {
+ "epoch": 1.212340970898065,
+ "grad_norm": 0.7687499523162842,
+ "learning_rate": 0.00012958282116876026,
+ "loss": 1.517,
+ "step": 1212
+ },
+ {
+ "epoch": 1.213341252227189,
+ "grad_norm": 0.5819374918937683,
+ "learning_rate": 0.00012948267218115624,
+ "loss": 1.353,
+ "step": 1213
+ },
+ {
+ "epoch": 1.2143415335563128,
+ "grad_norm": 0.6273573040962219,
+ "learning_rate": 0.00012938249079742743,
+ "loss": 1.3114,
+ "step": 1214
+ },
+ {
+ "epoch": 1.2153418148854365,
+ "grad_norm": 0.8144434094429016,
+ "learning_rate": 0.00012928227712765504,
+ "loss": 1.4763,
+ "step": 1215
+ },
+ {
+ "epoch": 1.2163420962145604,
+ "grad_norm": 0.6941531300544739,
+ "learning_rate": 0.0001291820312819558,
+ "loss": 1.3386,
+ "step": 1216
+ },
+ {
+ "epoch": 1.2173423775436842,
+ "grad_norm": 0.6548559069633484,
+ "learning_rate": 0.00012908175337048174,
+ "loss": 1.4763,
+ "step": 1217
+ },
+ {
+ "epoch": 1.218342658872808,
+ "grad_norm": 0.6587492227554321,
+ "learning_rate": 0.00012898144350342015,
+ "loss": 1.5342,
+ "step": 1218
+ },
+ {
+ "epoch": 1.2193429402019318,
+ "grad_norm": 0.6187465190887451,
+ "learning_rate": 0.0001288811017909934,
+ "loss": 1.5109,
+ "step": 1219
+ },
+ {
+ "epoch": 1.2203432215310557,
+ "grad_norm": 0.6152673959732056,
+ "learning_rate": 0.00012878072834345895,
+ "loss": 1.2812,
+ "step": 1220
+ },
+ {
+ "epoch": 1.2213435028601793,
+ "grad_norm": 0.6489769816398621,
+ "learning_rate": 0.00012868032327110904,
+ "loss": 1.4179,
+ "step": 1221
+ },
+ {
+ "epoch": 1.2223437841893032,
+ "grad_norm": 0.6900584697723389,
+ "learning_rate": 0.00012857988668427066,
+ "loss": 1.5862,
+ "step": 1222
+ },
+ {
+ "epoch": 1.2233440655184271,
+ "grad_norm": 0.7858991026878357,
+ "learning_rate": 0.0001284794186933055,
+ "loss": 1.5595,
+ "step": 1223
+ },
+ {
+ "epoch": 1.2243443468475508,
+ "grad_norm": 0.6736376285552979,
+ "learning_rate": 0.00012837891940860972,
+ "loss": 1.6053,
+ "step": 1224
+ },
+ {
+ "epoch": 1.2253446281766747,
+ "grad_norm": 0.6328126192092896,
+ "learning_rate": 0.00012827838894061377,
+ "loss": 1.5157,
+ "step": 1225
+ },
+ {
+ "epoch": 1.2263449095057986,
+ "grad_norm": 0.6681983470916748,
+ "learning_rate": 0.00012817782739978255,
+ "loss": 1.3832,
+ "step": 1226
+ },
+ {
+ "epoch": 1.2273451908349222,
+ "grad_norm": 0.6474846601486206,
+ "learning_rate": 0.00012807723489661495,
+ "loss": 1.4772,
+ "step": 1227
+ },
+ {
+ "epoch": 1.228345472164046,
+ "grad_norm": 0.7181409597396851,
+ "learning_rate": 0.00012797661154164395,
+ "loss": 1.4942,
+ "step": 1228
+ },
+ {
+ "epoch": 1.22934575349317,
+ "grad_norm": 0.8670255541801453,
+ "learning_rate": 0.00012787595744543647,
+ "loss": 1.3818,
+ "step": 1229
+ },
+ {
+ "epoch": 1.2303460348222939,
+ "grad_norm": 0.7007678747177124,
+ "learning_rate": 0.00012777527271859307,
+ "loss": 1.4428,
+ "step": 1230
+ },
+ {
+ "epoch": 1.2313463161514175,
+ "grad_norm": 0.5932227373123169,
+ "learning_rate": 0.0001276745574717481,
+ "loss": 1.1946,
+ "step": 1231
+ },
+ {
+ "epoch": 1.2323465974805414,
+ "grad_norm": 0.6067792177200317,
+ "learning_rate": 0.00012757381181556943,
+ "loss": 1.1963,
+ "step": 1232
+ },
+ {
+ "epoch": 1.2333468788096653,
+ "grad_norm": 0.6702238917350769,
+ "learning_rate": 0.0001274730358607583,
+ "loss": 1.4489,
+ "step": 1233
+ },
+ {
+ "epoch": 1.234347160138789,
+ "grad_norm": 0.6660708785057068,
+ "learning_rate": 0.00012737222971804924,
+ "loss": 1.4307,
+ "step": 1234
+ },
+ {
+ "epoch": 1.2353474414679129,
+ "grad_norm": 0.6711790561676025,
+ "learning_rate": 0.00012727139349821,
+ "loss": 1.4167,
+ "step": 1235
+ },
+ {
+ "epoch": 1.2363477227970368,
+ "grad_norm": 0.7145324349403381,
+ "learning_rate": 0.0001271705273120413,
+ "loss": 1.4125,
+ "step": 1236
+ },
+ {
+ "epoch": 1.2373480041261604,
+ "grad_norm": 0.6572199463844299,
+ "learning_rate": 0.00012706963127037685,
+ "loss": 1.2403,
+ "step": 1237
+ },
+ {
+ "epoch": 1.2383482854552843,
+ "grad_norm": 0.634173572063446,
+ "learning_rate": 0.00012696870548408316,
+ "loss": 1.2861,
+ "step": 1238
+ },
+ {
+ "epoch": 1.2393485667844082,
+ "grad_norm": 0.6740716695785522,
+ "learning_rate": 0.00012686775006405946,
+ "loss": 1.6377,
+ "step": 1239
+ },
+ {
+ "epoch": 1.2403488481135319,
+ "grad_norm": 0.7334823608398438,
+ "learning_rate": 0.00012676676512123747,
+ "loss": 1.4186,
+ "step": 1240
+ },
+ {
+ "epoch": 1.2413491294426557,
+ "grad_norm": 0.9025078415870667,
+ "learning_rate": 0.00012666575076658134,
+ "loss": 1.4633,
+ "step": 1241
+ },
+ {
+ "epoch": 1.2423494107717796,
+ "grad_norm": 0.7449138760566711,
+ "learning_rate": 0.00012656470711108764,
+ "loss": 1.5294,
+ "step": 1242
+ },
+ {
+ "epoch": 1.2433496921009033,
+ "grad_norm": 0.6459099054336548,
+ "learning_rate": 0.00012646363426578505,
+ "loss": 1.4404,
+ "step": 1243
+ },
+ {
+ "epoch": 1.2443499734300272,
+ "grad_norm": 0.762955904006958,
+ "learning_rate": 0.0001263625323417343,
+ "loss": 1.6486,
+ "step": 1244
+ },
+ {
+ "epoch": 1.245350254759151,
+ "grad_norm": 0.6792619228363037,
+ "learning_rate": 0.0001262614014500282,
+ "loss": 1.3946,
+ "step": 1245
+ },
+ {
+ "epoch": 1.2463505360882747,
+ "grad_norm": 0.7938026189804077,
+ "learning_rate": 0.00012616024170179126,
+ "loss": 1.5163,
+ "step": 1246
+ },
+ {
+ "epoch": 1.2473508174173986,
+ "grad_norm": 0.6673023104667664,
+ "learning_rate": 0.00012605905320817976,
+ "loss": 1.5599,
+ "step": 1247
+ },
+ {
+ "epoch": 1.2483510987465225,
+ "grad_norm": 0.6543686389923096,
+ "learning_rate": 0.00012595783608038155,
+ "loss": 1.3298,
+ "step": 1248
+ },
+ {
+ "epoch": 1.2493513800756464,
+ "grad_norm": 0.6570146083831787,
+ "learning_rate": 0.00012585659042961596,
+ "loss": 1.2528,
+ "step": 1249
+ },
+ {
+ "epoch": 1.25035166140477,
+ "grad_norm": 0.6886934638023376,
+ "learning_rate": 0.00012575531636713368,
+ "loss": 1.4567,
+ "step": 1250
+ },
+ {
+ "epoch": 1.251351942733894,
+ "grad_norm": 0.660229504108429,
+ "learning_rate": 0.00012565401400421651,
+ "loss": 1.3333,
+ "step": 1251
+ },
+ {
+ "epoch": 1.2523522240630176,
+ "grad_norm": 0.7553595900535583,
+ "learning_rate": 0.0001255526834521775,
+ "loss": 1.4152,
+ "step": 1252
+ },
+ {
+ "epoch": 1.2533525053921415,
+ "grad_norm": 0.6283079385757446,
+ "learning_rate": 0.00012545132482236055,
+ "loss": 1.3201,
+ "step": 1253
+ },
+ {
+ "epoch": 1.2543527867212654,
+ "grad_norm": 0.6704882383346558,
+ "learning_rate": 0.0001253499382261405,
+ "loss": 1.3711,
+ "step": 1254
+ },
+ {
+ "epoch": 1.2553530680503893,
+ "grad_norm": 0.7282963991165161,
+ "learning_rate": 0.00012524852377492285,
+ "loss": 1.5492,
+ "step": 1255
+ },
+ {
+ "epoch": 1.256353349379513,
+ "grad_norm": 0.7712034583091736,
+ "learning_rate": 0.00012514708158014378,
+ "loss": 1.4299,
+ "step": 1256
+ },
+ {
+ "epoch": 1.2573536307086368,
+ "grad_norm": 0.6231324076652527,
+ "learning_rate": 0.00012504561175326985,
+ "loss": 1.3492,
+ "step": 1257
+ },
+ {
+ "epoch": 1.2583539120377607,
+ "grad_norm": 0.7122861742973328,
+ "learning_rate": 0.00012494411440579814,
+ "loss": 1.4583,
+ "step": 1258
+ },
+ {
+ "epoch": 1.2593541933668844,
+ "grad_norm": 0.7797596454620361,
+ "learning_rate": 0.0001248425896492558,
+ "loss": 1.4185,
+ "step": 1259
+ },
+ {
+ "epoch": 1.2603544746960083,
+ "grad_norm": 0.7058592438697815,
+ "learning_rate": 0.00012474103759520027,
+ "loss": 1.7918,
+ "step": 1260
+ },
+ {
+ "epoch": 1.2613547560251321,
+ "grad_norm": 0.6629828214645386,
+ "learning_rate": 0.00012463945835521878,
+ "loss": 1.2547,
+ "step": 1261
+ },
+ {
+ "epoch": 1.2623550373542558,
+ "grad_norm": 0.6975031495094299,
+ "learning_rate": 0.0001245378520409286,
+ "loss": 1.6256,
+ "step": 1262
+ },
+ {
+ "epoch": 1.2633553186833797,
+ "grad_norm": 0.7939541935920715,
+ "learning_rate": 0.0001244362187639767,
+ "loss": 1.2817,
+ "step": 1263
+ },
+ {
+ "epoch": 1.2643556000125036,
+ "grad_norm": 0.7042918801307678,
+ "learning_rate": 0.00012433455863603967,
+ "loss": 1.4207,
+ "step": 1264
+ },
+ {
+ "epoch": 1.2653558813416272,
+ "grad_norm": 0.6327396631240845,
+ "learning_rate": 0.00012423287176882358,
+ "loss": 1.3927,
+ "step": 1265
+ },
+ {
+ "epoch": 1.2663561626707511,
+ "grad_norm": 0.5797626376152039,
+ "learning_rate": 0.00012413115827406392,
+ "loss": 1.2808,
+ "step": 1266
+ },
+ {
+ "epoch": 1.267356443999875,
+ "grad_norm": 0.6891800165176392,
+ "learning_rate": 0.00012402941826352546,
+ "loss": 1.2385,
+ "step": 1267
+ },
+ {
+ "epoch": 1.268356725328999,
+ "grad_norm": 0.6648104190826416,
+ "learning_rate": 0.00012392765184900202,
+ "loss": 1.5402,
+ "step": 1268
+ },
+ {
+ "epoch": 1.2693570066581226,
+ "grad_norm": 0.7340229749679565,
+ "learning_rate": 0.0001238258591423165,
+ "loss": 1.2926,
+ "step": 1269
+ },
+ {
+ "epoch": 1.2703572879872465,
+ "grad_norm": 0.670605480670929,
+ "learning_rate": 0.00012372404025532072,
+ "loss": 1.3863,
+ "step": 1270
+ },
+ {
+ "epoch": 1.2713575693163701,
+ "grad_norm": 0.7016957402229309,
+ "learning_rate": 0.00012362219529989514,
+ "loss": 1.7362,
+ "step": 1271
+ },
+ {
+ "epoch": 1.272357850645494,
+ "grad_norm": 0.6318536400794983,
+ "learning_rate": 0.00012352032438794902,
+ "loss": 1.4278,
+ "step": 1272
+ },
+ {
+ "epoch": 1.273358131974618,
+ "grad_norm": 0.5807138681411743,
+ "learning_rate": 0.00012341842763142005,
+ "loss": 1.4762,
+ "step": 1273
+ },
+ {
+ "epoch": 1.2743584133037418,
+ "grad_norm": 0.6634588241577148,
+ "learning_rate": 0.00012331650514227425,
+ "loss": 1.6209,
+ "step": 1274
+ },
+ {
+ "epoch": 1.2753586946328654,
+ "grad_norm": 0.709530770778656,
+ "learning_rate": 0.00012321455703250616,
+ "loss": 1.5209,
+ "step": 1275
+ },
+ {
+ "epoch": 1.2763589759619893,
+ "grad_norm": 0.678584098815918,
+ "learning_rate": 0.00012311258341413822,
+ "loss": 1.4247,
+ "step": 1276
+ },
+ {
+ "epoch": 1.277359257291113,
+ "grad_norm": 0.6134077310562134,
+ "learning_rate": 0.00012301058439922102,
+ "loss": 1.3792,
+ "step": 1277
+ },
+ {
+ "epoch": 1.2783595386202369,
+ "grad_norm": 0.694976806640625,
+ "learning_rate": 0.000122908560099833,
+ "loss": 1.5957,
+ "step": 1278
+ },
+ {
+ "epoch": 1.2793598199493608,
+ "grad_norm": 0.835444986820221,
+ "learning_rate": 0.00012280651062808047,
+ "loss": 1.4917,
+ "step": 1279
+ },
+ {
+ "epoch": 1.2803601012784847,
+ "grad_norm": 0.6491605043411255,
+ "learning_rate": 0.00012270443609609729,
+ "loss": 1.4417,
+ "step": 1280
+ },
+ {
+ "epoch": 1.2813603826076083,
+ "grad_norm": 0.6651148200035095,
+ "learning_rate": 0.0001226023366160449,
+ "loss": 1.2857,
+ "step": 1281
+ },
+ {
+ "epoch": 1.2823606639367322,
+ "grad_norm": 0.6736195683479309,
+ "learning_rate": 0.00012250021230011225,
+ "loss": 1.3431,
+ "step": 1282
+ },
+ {
+ "epoch": 1.283360945265856,
+ "grad_norm": 0.7162345051765442,
+ "learning_rate": 0.00012239806326051539,
+ "loss": 1.4009,
+ "step": 1283
+ },
+ {
+ "epoch": 1.2843612265949798,
+ "grad_norm": 0.8337399363517761,
+ "learning_rate": 0.00012229588960949771,
+ "loss": 1.5303,
+ "step": 1284
+ },
+ {
+ "epoch": 1.2853615079241036,
+ "grad_norm": 0.6648454666137695,
+ "learning_rate": 0.00012219369145932959,
+ "loss": 1.7088,
+ "step": 1285
+ },
+ {
+ "epoch": 1.2863617892532275,
+ "grad_norm": 0.6891435384750366,
+ "learning_rate": 0.00012209146892230822,
+ "loss": 1.4053,
+ "step": 1286
+ },
+ {
+ "epoch": 1.2873620705823514,
+ "grad_norm": 0.6659008860588074,
+ "learning_rate": 0.00012198922211075778,
+ "loss": 1.3959,
+ "step": 1287
+ },
+ {
+ "epoch": 1.288362351911475,
+ "grad_norm": 0.6926385164260864,
+ "learning_rate": 0.00012188695113702896,
+ "loss": 1.536,
+ "step": 1288
+ },
+ {
+ "epoch": 1.289362633240599,
+ "grad_norm": 0.6584843397140503,
+ "learning_rate": 0.00012178465611349911,
+ "loss": 1.5099,
+ "step": 1289
+ },
+ {
+ "epoch": 1.2903629145697226,
+ "grad_norm": 0.7430850267410278,
+ "learning_rate": 0.00012168233715257194,
+ "loss": 1.3367,
+ "step": 1290
+ },
+ {
+ "epoch": 1.2913631958988465,
+ "grad_norm": 0.8379004597663879,
+ "learning_rate": 0.00012157999436667747,
+ "loss": 1.3542,
+ "step": 1291
+ },
+ {
+ "epoch": 1.2923634772279704,
+ "grad_norm": 0.7217230796813965,
+ "learning_rate": 0.00012147762786827193,
+ "loss": 1.4465,
+ "step": 1292
+ },
+ {
+ "epoch": 1.2933637585570943,
+ "grad_norm": 0.7268504500389099,
+ "learning_rate": 0.00012137523776983757,
+ "loss": 1.2616,
+ "step": 1293
+ },
+ {
+ "epoch": 1.294364039886218,
+ "grad_norm": 0.7402834296226501,
+ "learning_rate": 0.00012127282418388264,
+ "loss": 1.271,
+ "step": 1294
+ },
+ {
+ "epoch": 1.2953643212153418,
+ "grad_norm": 0.6314610242843628,
+ "learning_rate": 0.0001211703872229411,
+ "loss": 1.3718,
+ "step": 1295
+ },
+ {
+ "epoch": 1.2963646025444655,
+ "grad_norm": 0.6106632947921753,
+ "learning_rate": 0.00012106792699957263,
+ "loss": 1.6404,
+ "step": 1296
+ },
+ {
+ "epoch": 1.2973648838735894,
+ "grad_norm": 0.6568691730499268,
+ "learning_rate": 0.00012096544362636255,
+ "loss": 1.3559,
+ "step": 1297
+ },
+ {
+ "epoch": 1.2983651652027133,
+ "grad_norm": 0.700645387172699,
+ "learning_rate": 0.00012086293721592152,
+ "loss": 1.5258,
+ "step": 1298
+ },
+ {
+ "epoch": 1.2993654465318372,
+ "grad_norm": 1.0722559690475464,
+ "learning_rate": 0.00012076040788088554,
+ "loss": 1.4921,
+ "step": 1299
+ },
+ {
+ "epoch": 1.3003657278609608,
+ "grad_norm": 0.6164102554321289,
+ "learning_rate": 0.00012065785573391581,
+ "loss": 1.3766,
+ "step": 1300
+ },
+ {
+ "epoch": 1.3013660091900847,
+ "grad_norm": 0.7066829204559326,
+ "learning_rate": 0.00012055528088769861,
+ "loss": 1.4322,
+ "step": 1301
+ },
+ {
+ "epoch": 1.3023662905192086,
+ "grad_norm": 0.6311153769493103,
+ "learning_rate": 0.00012045268345494511,
+ "loss": 1.2958,
+ "step": 1302
+ },
+ {
+ "epoch": 1.3033665718483323,
+ "grad_norm": 0.6254247426986694,
+ "learning_rate": 0.00012035006354839133,
+ "loss": 1.3125,
+ "step": 1303
+ },
+ {
+ "epoch": 1.3043668531774562,
+ "grad_norm": 0.7812719941139221,
+ "learning_rate": 0.00012024742128079805,
+ "loss": 1.5446,
+ "step": 1304
+ },
+ {
+ "epoch": 1.30536713450658,
+ "grad_norm": 0.6067742109298706,
+ "learning_rate": 0.00012014475676495052,
+ "loss": 1.2821,
+ "step": 1305
+ },
+ {
+ "epoch": 1.306367415835704,
+ "grad_norm": 0.6812780499458313,
+ "learning_rate": 0.00012004207011365849,
+ "loss": 1.2988,
+ "step": 1306
+ },
+ {
+ "epoch": 1.3073676971648276,
+ "grad_norm": 0.6978224515914917,
+ "learning_rate": 0.00011993936143975599,
+ "loss": 1.3677,
+ "step": 1307
+ },
+ {
+ "epoch": 1.3083679784939515,
+ "grad_norm": 0.660102903842926,
+ "learning_rate": 0.00011983663085610131,
+ "loss": 1.3688,
+ "step": 1308
+ },
+ {
+ "epoch": 1.3093682598230751,
+ "grad_norm": 0.706007182598114,
+ "learning_rate": 0.00011973387847557676,
+ "loss": 1.2638,
+ "step": 1309
+ },
+ {
+ "epoch": 1.310368541152199,
+ "grad_norm": 0.6711030602455139,
+ "learning_rate": 0.00011963110441108863,
+ "loss": 1.2569,
+ "step": 1310
+ },
+ {
+ "epoch": 1.311368822481323,
+ "grad_norm": 0.6825215816497803,
+ "learning_rate": 0.000119528308775567,
+ "loss": 1.3912,
+ "step": 1311
+ },
+ {
+ "epoch": 1.3123691038104468,
+ "grad_norm": 0.725979745388031,
+ "learning_rate": 0.00011942549168196575,
+ "loss": 1.331,
+ "step": 1312
+ },
+ {
+ "epoch": 1.3133693851395705,
+ "grad_norm": 0.6699597835540771,
+ "learning_rate": 0.00011932265324326221,
+ "loss": 1.4171,
+ "step": 1313
+ },
+ {
+ "epoch": 1.3143696664686944,
+ "grad_norm": 0.6116276383399963,
+ "learning_rate": 0.0001192197935724573,
+ "loss": 1.3333,
+ "step": 1314
+ },
+ {
+ "epoch": 1.315369947797818,
+ "grad_norm": 0.6644623875617981,
+ "learning_rate": 0.00011911691278257511,
+ "loss": 1.5515,
+ "step": 1315
+ },
+ {
+ "epoch": 1.316370229126942,
+ "grad_norm": 0.6456226110458374,
+ "learning_rate": 0.0001190140109866631,
+ "loss": 1.2691,
+ "step": 1316
+ },
+ {
+ "epoch": 1.3173705104560658,
+ "grad_norm": 0.6665071249008179,
+ "learning_rate": 0.00011891108829779165,
+ "loss": 1.3782,
+ "step": 1317
+ },
+ {
+ "epoch": 1.3183707917851897,
+ "grad_norm": 0.7108166813850403,
+ "learning_rate": 0.00011880814482905422,
+ "loss": 1.3122,
+ "step": 1318
+ },
+ {
+ "epoch": 1.3193710731143133,
+ "grad_norm": 0.7184962630271912,
+ "learning_rate": 0.00011870518069356709,
+ "loss": 1.3624,
+ "step": 1319
+ },
+ {
+ "epoch": 1.3203713544434372,
+ "grad_norm": 0.6516618132591248,
+ "learning_rate": 0.0001186021960044692,
+ "loss": 1.5654,
+ "step": 1320
+ },
+ {
+ "epoch": 1.3213716357725611,
+ "grad_norm": 0.6597375869750977,
+ "learning_rate": 0.00011849919087492211,
+ "loss": 1.4765,
+ "step": 1321
+ },
+ {
+ "epoch": 1.3223719171016848,
+ "grad_norm": 0.8731528520584106,
+ "learning_rate": 0.00011839616541810983,
+ "loss": 1.3876,
+ "step": 1322
+ },
+ {
+ "epoch": 1.3233721984308087,
+ "grad_norm": 0.6694337725639343,
+ "learning_rate": 0.00011829311974723867,
+ "loss": 1.327,
+ "step": 1323
+ },
+ {
+ "epoch": 1.3243724797599326,
+ "grad_norm": 0.7454182505607605,
+ "learning_rate": 0.00011819005397553723,
+ "loss": 1.4202,
+ "step": 1324
+ },
+ {
+ "epoch": 1.3253727610890562,
+ "grad_norm": 0.6783546209335327,
+ "learning_rate": 0.00011808696821625613,
+ "loss": 1.3966,
+ "step": 1325
+ },
+ {
+ "epoch": 1.32637304241818,
+ "grad_norm": 0.7126333713531494,
+ "learning_rate": 0.000117983862582668,
+ "loss": 1.3896,
+ "step": 1326
+ },
+ {
+ "epoch": 1.327373323747304,
+ "grad_norm": 0.6765009164810181,
+ "learning_rate": 0.00011788073718806725,
+ "loss": 1.3678,
+ "step": 1327
+ },
+ {
+ "epoch": 1.3283736050764277,
+ "grad_norm": 0.6767436861991882,
+ "learning_rate": 0.00011777759214577006,
+ "loss": 1.497,
+ "step": 1328
+ },
+ {
+ "epoch": 1.3293738864055515,
+ "grad_norm": 0.7058733105659485,
+ "learning_rate": 0.00011767442756911417,
+ "loss": 1.4992,
+ "step": 1329
+ },
+ {
+ "epoch": 1.3303741677346754,
+ "grad_norm": 0.6815193295478821,
+ "learning_rate": 0.00011757124357145881,
+ "loss": 1.4952,
+ "step": 1330
+ },
+ {
+ "epoch": 1.3313744490637993,
+ "grad_norm": 0.6513908505439758,
+ "learning_rate": 0.00011746804026618452,
+ "loss": 1.5202,
+ "step": 1331
+ },
+ {
+ "epoch": 1.332374730392923,
+ "grad_norm": 0.6763479113578796,
+ "learning_rate": 0.00011736481776669306,
+ "loss": 1.4547,
+ "step": 1332
+ },
+ {
+ "epoch": 1.3333750117220469,
+ "grad_norm": 0.6361576914787292,
+ "learning_rate": 0.00011726157618640728,
+ "loss": 1.3231,
+ "step": 1333
+ },
+ {
+ "epoch": 1.3343752930511705,
+ "grad_norm": 0.7247117161750793,
+ "learning_rate": 0.00011715831563877104,
+ "loss": 1.5126,
+ "step": 1334
+ },
+ {
+ "epoch": 1.3353755743802944,
+ "grad_norm": 0.8347336649894714,
+ "learning_rate": 0.00011705503623724898,
+ "loss": 1.5669,
+ "step": 1335
+ },
+ {
+ "epoch": 1.3363758557094183,
+ "grad_norm": 0.6969489455223083,
+ "learning_rate": 0.00011695173809532652,
+ "loss": 1.3646,
+ "step": 1336
+ },
+ {
+ "epoch": 1.3373761370385422,
+ "grad_norm": 0.6771052479743958,
+ "learning_rate": 0.00011684842132650957,
+ "loss": 1.6715,
+ "step": 1337
+ },
+ {
+ "epoch": 1.3383764183676659,
+ "grad_norm": 0.6224768161773682,
+ "learning_rate": 0.00011674508604432464,
+ "loss": 1.5712,
+ "step": 1338
+ },
+ {
+ "epoch": 1.3393766996967897,
+ "grad_norm": 0.6684542298316956,
+ "learning_rate": 0.00011664173236231848,
+ "loss": 1.5669,
+ "step": 1339
+ },
+ {
+ "epoch": 1.3403769810259134,
+ "grad_norm": 0.6805415153503418,
+ "learning_rate": 0.0001165383603940581,
+ "loss": 1.3648,
+ "step": 1340
+ },
+ {
+ "epoch": 1.3413772623550373,
+ "grad_norm": 0.7991671562194824,
+ "learning_rate": 0.00011643497025313061,
+ "loss": 1.3703,
+ "step": 1341
+ },
+ {
+ "epoch": 1.3423775436841612,
+ "grad_norm": 0.7023015022277832,
+ "learning_rate": 0.00011633156205314309,
+ "loss": 1.366,
+ "step": 1342
+ },
+ {
+ "epoch": 1.343377825013285,
+ "grad_norm": 0.7017894387245178,
+ "learning_rate": 0.00011622813590772244,
+ "loss": 1.4816,
+ "step": 1343
+ },
+ {
+ "epoch": 1.3443781063424087,
+ "grad_norm": 0.704626739025116,
+ "learning_rate": 0.00011612469193051525,
+ "loss": 1.2841,
+ "step": 1344
+ },
+ {
+ "epoch": 1.3453783876715326,
+ "grad_norm": 0.707372784614563,
+ "learning_rate": 0.00011602123023518779,
+ "loss": 1.3226,
+ "step": 1345
+ },
+ {
+ "epoch": 1.3463786690006565,
+ "grad_norm": 0.6367921233177185,
+ "learning_rate": 0.00011591775093542572,
+ "loss": 1.3292,
+ "step": 1346
+ },
+ {
+ "epoch": 1.3473789503297802,
+ "grad_norm": 0.7131246328353882,
+ "learning_rate": 0.0001158142541449341,
+ "loss": 1.3537,
+ "step": 1347
+ },
+ {
+ "epoch": 1.348379231658904,
+ "grad_norm": 0.6140089631080627,
+ "learning_rate": 0.00011571073997743716,
+ "loss": 1.4316,
+ "step": 1348
+ },
+ {
+ "epoch": 1.349379512988028,
+ "grad_norm": 0.6347403526306152,
+ "learning_rate": 0.0001156072085466783,
+ "loss": 1.4214,
+ "step": 1349
+ },
+ {
+ "epoch": 1.3503797943171518,
+ "grad_norm": 0.5540759563446045,
+ "learning_rate": 0.00011550365996641979,
+ "loss": 1.25,
+ "step": 1350
+ },
+ {
+ "epoch": 1.3513800756462755,
+ "grad_norm": 0.6721670031547546,
+ "learning_rate": 0.00011540009435044281,
+ "loss": 1.381,
+ "step": 1351
+ },
+ {
+ "epoch": 1.3523803569753994,
+ "grad_norm": 0.5901767015457153,
+ "learning_rate": 0.00011529651181254723,
+ "loss": 1.5127,
+ "step": 1352
+ },
+ {
+ "epoch": 1.353380638304523,
+ "grad_norm": 0.6438884139060974,
+ "learning_rate": 0.0001151929124665516,
+ "loss": 1.3348,
+ "step": 1353
+ },
+ {
+ "epoch": 1.354380919633647,
+ "grad_norm": 0.9631819725036621,
+ "learning_rate": 0.00011508929642629274,
+ "loss": 1.5822,
+ "step": 1354
+ },
+ {
+ "epoch": 1.3553812009627708,
+ "grad_norm": 0.6426034569740295,
+ "learning_rate": 0.00011498566380562601,
+ "loss": 1.3487,
+ "step": 1355
+ },
+ {
+ "epoch": 1.3563814822918947,
+ "grad_norm": 0.682264506816864,
+ "learning_rate": 0.0001148820147184249,
+ "loss": 1.3331,
+ "step": 1356
+ },
+ {
+ "epoch": 1.3573817636210184,
+ "grad_norm": 0.746907114982605,
+ "learning_rate": 0.00011477834927858104,
+ "loss": 1.38,
+ "step": 1357
+ },
+ {
+ "epoch": 1.3583820449501423,
+ "grad_norm": 0.7521925568580627,
+ "learning_rate": 0.00011467466760000399,
+ "loss": 1.3798,
+ "step": 1358
+ },
+ {
+ "epoch": 1.359382326279266,
+ "grad_norm": 0.7887171506881714,
+ "learning_rate": 0.00011457096979662114,
+ "loss": 1.3306,
+ "step": 1359
+ },
+ {
+ "epoch": 1.3603826076083898,
+ "grad_norm": 0.7623118162155151,
+ "learning_rate": 0.00011446725598237767,
+ "loss": 1.4307,
+ "step": 1360
+ },
+ {
+ "epoch": 1.3613828889375137,
+ "grad_norm": 0.6182076930999756,
+ "learning_rate": 0.00011436352627123623,
+ "loss": 1.3776,
+ "step": 1361
+ },
+ {
+ "epoch": 1.3623831702666376,
+ "grad_norm": 0.6136983633041382,
+ "learning_rate": 0.00011425978077717709,
+ "loss": 1.4815,
+ "step": 1362
+ },
+ {
+ "epoch": 1.3633834515957612,
+ "grad_norm": 0.6165998578071594,
+ "learning_rate": 0.00011415601961419775,
+ "loss": 1.4252,
+ "step": 1363
+ },
+ {
+ "epoch": 1.3643837329248851,
+ "grad_norm": 0.7338213324546814,
+ "learning_rate": 0.00011405224289631295,
+ "loss": 1.4426,
+ "step": 1364
+ },
+ {
+ "epoch": 1.365384014254009,
+ "grad_norm": 0.8061873316764832,
+ "learning_rate": 0.00011394845073755455,
+ "loss": 1.5366,
+ "step": 1365
+ },
+ {
+ "epoch": 1.3663842955831327,
+ "grad_norm": 0.8496206402778625,
+ "learning_rate": 0.0001138446432519714,
+ "loss": 1.3305,
+ "step": 1366
+ },
+ {
+ "epoch": 1.3673845769122566,
+ "grad_norm": 0.7370564341545105,
+ "learning_rate": 0.00011374082055362909,
+ "loss": 1.5069,
+ "step": 1367
+ },
+ {
+ "epoch": 1.3683848582413805,
+ "grad_norm": 0.630095362663269,
+ "learning_rate": 0.00011363698275661001,
+ "loss": 1.2846,
+ "step": 1368
+ },
+ {
+ "epoch": 1.3693851395705043,
+ "grad_norm": 0.7039877772331238,
+ "learning_rate": 0.00011353312997501313,
+ "loss": 1.4091,
+ "step": 1369
+ },
+ {
+ "epoch": 1.370385420899628,
+ "grad_norm": 0.7010135650634766,
+ "learning_rate": 0.00011342926232295386,
+ "loss": 1.4068,
+ "step": 1370
+ },
+ {
+ "epoch": 1.371385702228752,
+ "grad_norm": 0.6542472243309021,
+ "learning_rate": 0.00011332537991456398,
+ "loss": 1.3422,
+ "step": 1371
+ },
+ {
+ "epoch": 1.3723859835578756,
+ "grad_norm": 0.7767142057418823,
+ "learning_rate": 0.00011322148286399147,
+ "loss": 1.3156,
+ "step": 1372
+ },
+ {
+ "epoch": 1.3733862648869994,
+ "grad_norm": 0.7862635254859924,
+ "learning_rate": 0.0001131175712854004,
+ "loss": 1.3952,
+ "step": 1373
+ },
+ {
+ "epoch": 1.3743865462161233,
+ "grad_norm": 0.7444994449615479,
+ "learning_rate": 0.00011301364529297079,
+ "loss": 1.2304,
+ "step": 1374
+ },
+ {
+ "epoch": 1.3753868275452472,
+ "grad_norm": 0.6078478693962097,
+ "learning_rate": 0.0001129097050008985,
+ "loss": 1.4364,
+ "step": 1375
+ },
+ {
+ "epoch": 1.3763871088743709,
+ "grad_norm": 0.7011739015579224,
+ "learning_rate": 0.00011280575052339514,
+ "loss": 1.4455,
+ "step": 1376
+ },
+ {
+ "epoch": 1.3773873902034948,
+ "grad_norm": 0.7185930013656616,
+ "learning_rate": 0.00011270178197468789,
+ "loss": 1.349,
+ "step": 1377
+ },
+ {
+ "epoch": 1.3783876715326184,
+ "grad_norm": 0.6734675168991089,
+ "learning_rate": 0.00011259779946901934,
+ "loss": 1.4803,
+ "step": 1378
+ },
+ {
+ "epoch": 1.3793879528617423,
+ "grad_norm": 0.674439013004303,
+ "learning_rate": 0.0001124938031206475,
+ "loss": 1.5707,
+ "step": 1379
+ },
+ {
+ "epoch": 1.3803882341908662,
+ "grad_norm": 0.6843717694282532,
+ "learning_rate": 0.00011238979304384554,
+ "loss": 1.5508,
+ "step": 1380
+ },
+ {
+ "epoch": 1.38138851551999,
+ "grad_norm": 0.6178708672523499,
+ "learning_rate": 0.0001122857693529017,
+ "loss": 1.2827,
+ "step": 1381
+ },
+ {
+ "epoch": 1.3823887968491138,
+ "grad_norm": 0.721108078956604,
+ "learning_rate": 0.0001121817321621192,
+ "loss": 1.3768,
+ "step": 1382
+ },
+ {
+ "epoch": 1.3833890781782376,
+ "grad_norm": 0.6790196299552917,
+ "learning_rate": 0.00011207768158581613,
+ "loss": 1.508,
+ "step": 1383
+ },
+ {
+ "epoch": 1.3843893595073615,
+ "grad_norm": 0.6942607760429382,
+ "learning_rate": 0.00011197361773832525,
+ "loss": 1.2476,
+ "step": 1384
+ },
+ {
+ "epoch": 1.3853896408364852,
+ "grad_norm": 0.6791447997093201,
+ "learning_rate": 0.00011186954073399387,
+ "loss": 1.5537,
+ "step": 1385
+ },
+ {
+ "epoch": 1.386389922165609,
+ "grad_norm": 0.6842163801193237,
+ "learning_rate": 0.00011176545068718385,
+ "loss": 1.4336,
+ "step": 1386
+ },
+ {
+ "epoch": 1.387390203494733,
+ "grad_norm": 0.6922981142997742,
+ "learning_rate": 0.0001116613477122713,
+ "loss": 1.2496,
+ "step": 1387
+ },
+ {
+ "epoch": 1.3883904848238566,
+ "grad_norm": 0.7480785250663757,
+ "learning_rate": 0.00011155723192364658,
+ "loss": 1.5798,
+ "step": 1388
+ },
+ {
+ "epoch": 1.3893907661529805,
+ "grad_norm": 0.6371482610702515,
+ "learning_rate": 0.00011145310343571411,
+ "loss": 1.326,
+ "step": 1389
+ },
+ {
+ "epoch": 1.3903910474821044,
+ "grad_norm": 0.663810670375824,
+ "learning_rate": 0.00011134896236289224,
+ "loss": 1.3021,
+ "step": 1390
+ },
+ {
+ "epoch": 1.391391328811228,
+ "grad_norm": 0.6067004799842834,
+ "learning_rate": 0.0001112448088196132,
+ "loss": 1.5062,
+ "step": 1391
+ },
+ {
+ "epoch": 1.392391610140352,
+ "grad_norm": 0.6500088572502136,
+ "learning_rate": 0.00011114064292032282,
+ "loss": 1.3196,
+ "step": 1392
+ },
+ {
+ "epoch": 1.3933918914694758,
+ "grad_norm": 0.7117498517036438,
+ "learning_rate": 0.0001110364647794807,
+ "loss": 1.354,
+ "step": 1393
+ },
+ {
+ "epoch": 1.3943921727985997,
+ "grad_norm": 0.5792518854141235,
+ "learning_rate": 0.00011093227451155974,
+ "loss": 1.1684,
+ "step": 1394
+ },
+ {
+ "epoch": 1.3953924541277234,
+ "grad_norm": 0.6920313835144043,
+ "learning_rate": 0.0001108280722310462,
+ "loss": 1.5114,
+ "step": 1395
+ },
+ {
+ "epoch": 1.3963927354568473,
+ "grad_norm": 0.5932325720787048,
+ "learning_rate": 0.0001107238580524395,
+ "loss": 1.4519,
+ "step": 1396
+ },
+ {
+ "epoch": 1.397393016785971,
+ "grad_norm": 0.7241511940956116,
+ "learning_rate": 0.00011061963209025223,
+ "loss": 1.4131,
+ "step": 1397
+ },
+ {
+ "epoch": 1.3983932981150948,
+ "grad_norm": 0.6144927740097046,
+ "learning_rate": 0.00011051539445900983,
+ "loss": 1.4436,
+ "step": 1398
+ },
+ {
+ "epoch": 1.3993935794442187,
+ "grad_norm": 0.589124321937561,
+ "learning_rate": 0.00011041114527325065,
+ "loss": 1.4069,
+ "step": 1399
+ },
+ {
+ "epoch": 1.4003938607733426,
+ "grad_norm": 0.6695122122764587,
+ "learning_rate": 0.00011030688464752566,
+ "loss": 1.6472,
+ "step": 1400
+ },
+ {
+ "epoch": 1.4013941421024663,
+ "grad_norm": 0.6082026362419128,
+ "learning_rate": 0.00011020261269639842,
+ "loss": 1.3188,
+ "step": 1401
+ },
+ {
+ "epoch": 1.4023944234315902,
+ "grad_norm": 0.8926504254341125,
+ "learning_rate": 0.000110098329534445,
+ "loss": 1.4195,
+ "step": 1402
+ },
+ {
+ "epoch": 1.4033947047607138,
+ "grad_norm": 0.6825796365737915,
+ "learning_rate": 0.00010999403527625367,
+ "loss": 1.5214,
+ "step": 1403
+ },
+ {
+ "epoch": 1.4043949860898377,
+ "grad_norm": 0.6856653690338135,
+ "learning_rate": 0.00010988973003642499,
+ "loss": 1.4579,
+ "step": 1404
+ },
+ {
+ "epoch": 1.4053952674189616,
+ "grad_norm": 0.6756052374839783,
+ "learning_rate": 0.00010978541392957156,
+ "loss": 1.331,
+ "step": 1405
+ },
+ {
+ "epoch": 1.4063955487480855,
+ "grad_norm": 0.6562577486038208,
+ "learning_rate": 0.00010968108707031792,
+ "loss": 1.2906,
+ "step": 1406
+ },
+ {
+ "epoch": 1.4073958300772091,
+ "grad_norm": 0.7208355069160461,
+ "learning_rate": 0.00010957674957330042,
+ "loss": 1.5163,
+ "step": 1407
+ },
+ {
+ "epoch": 1.408396111406333,
+ "grad_norm": 0.6576356291770935,
+ "learning_rate": 0.00010947240155316707,
+ "loss": 1.3934,
+ "step": 1408
+ },
+ {
+ "epoch": 1.409396392735457,
+ "grad_norm": 0.6244364976882935,
+ "learning_rate": 0.00010936804312457749,
+ "loss": 1.3903,
+ "step": 1409
+ },
+ {
+ "epoch": 1.4103966740645806,
+ "grad_norm": 0.5791237354278564,
+ "learning_rate": 0.00010926367440220276,
+ "loss": 1.2708,
+ "step": 1410
+ },
+ {
+ "epoch": 1.4113969553937045,
+ "grad_norm": 0.7043458819389343,
+ "learning_rate": 0.00010915929550072517,
+ "loss": 1.2446,
+ "step": 1411
+ },
+ {
+ "epoch": 1.4123972367228284,
+ "grad_norm": 0.5865835547447205,
+ "learning_rate": 0.00010905490653483827,
+ "loss": 1.657,
+ "step": 1412
+ },
+ {
+ "epoch": 1.4133975180519522,
+ "grad_norm": 0.6033587455749512,
+ "learning_rate": 0.00010895050761924668,
+ "loss": 1.4481,
+ "step": 1413
+ },
+ {
+ "epoch": 1.414397799381076,
+ "grad_norm": 0.7100054025650024,
+ "learning_rate": 0.00010884609886866588,
+ "loss": 1.5413,
+ "step": 1414
+ },
+ {
+ "epoch": 1.4153980807101998,
+ "grad_norm": 0.8067667484283447,
+ "learning_rate": 0.00010874168039782227,
+ "loss": 1.4327,
+ "step": 1415
+ },
+ {
+ "epoch": 1.4163983620393235,
+ "grad_norm": 0.7058808207511902,
+ "learning_rate": 0.00010863725232145286,
+ "loss": 1.534,
+ "step": 1416
+ },
+ {
+ "epoch": 1.4173986433684473,
+ "grad_norm": 0.5253664255142212,
+ "learning_rate": 0.00010853281475430517,
+ "loss": 1.0816,
+ "step": 1417
+ },
+ {
+ "epoch": 1.4183989246975712,
+ "grad_norm": 0.6874144673347473,
+ "learning_rate": 0.0001084283678111372,
+ "loss": 1.4386,
+ "step": 1418
+ },
+ {
+ "epoch": 1.4193992060266951,
+ "grad_norm": 0.74046790599823,
+ "learning_rate": 0.00010832391160671729,
+ "loss": 1.3393,
+ "step": 1419
+ },
+ {
+ "epoch": 1.4203994873558188,
+ "grad_norm": 0.6461816430091858,
+ "learning_rate": 0.00010821944625582392,
+ "loss": 1.5251,
+ "step": 1420
+ },
+ {
+ "epoch": 1.4213997686849427,
+ "grad_norm": 0.6058275699615479,
+ "learning_rate": 0.00010811497187324555,
+ "loss": 1.1555,
+ "step": 1421
+ },
+ {
+ "epoch": 1.4224000500140663,
+ "grad_norm": 0.6121847033500671,
+ "learning_rate": 0.00010801048857378071,
+ "loss": 1.248,
+ "step": 1422
+ },
+ {
+ "epoch": 1.4234003313431902,
+ "grad_norm": 0.5949802398681641,
+ "learning_rate": 0.00010790599647223763,
+ "loss": 1.5056,
+ "step": 1423
+ },
+ {
+ "epoch": 1.424400612672314,
+ "grad_norm": 0.6441097259521484,
+ "learning_rate": 0.0001078014956834342,
+ "loss": 1.5484,
+ "step": 1424
+ },
+ {
+ "epoch": 1.425400894001438,
+ "grad_norm": 0.686570942401886,
+ "learning_rate": 0.00010769698632219794,
+ "loss": 1.631,
+ "step": 1425
+ },
+ {
+ "epoch": 1.4264011753305617,
+ "grad_norm": 0.675699770450592,
+ "learning_rate": 0.00010759246850336572,
+ "loss": 1.4044,
+ "step": 1426
+ },
+ {
+ "epoch": 1.4274014566596855,
+ "grad_norm": 0.7777390480041504,
+ "learning_rate": 0.0001074879423417837,
+ "loss": 1.6326,
+ "step": 1427
+ },
+ {
+ "epoch": 1.4284017379888094,
+ "grad_norm": 0.6218550205230713,
+ "learning_rate": 0.00010738340795230721,
+ "loss": 1.5341,
+ "step": 1428
+ },
+ {
+ "epoch": 1.429402019317933,
+ "grad_norm": 0.7164304256439209,
+ "learning_rate": 0.00010727886544980068,
+ "loss": 1.5888,
+ "step": 1429
+ },
+ {
+ "epoch": 1.430402300647057,
+ "grad_norm": 0.6924182772636414,
+ "learning_rate": 0.00010717431494913741,
+ "loss": 1.2542,
+ "step": 1430
+ },
+ {
+ "epoch": 1.4314025819761809,
+ "grad_norm": 0.6145774126052856,
+ "learning_rate": 0.00010706975656519946,
+ "loss": 1.4038,
+ "step": 1431
+ },
+ {
+ "epoch": 1.4324028633053048,
+ "grad_norm": 0.5560014843940735,
+ "learning_rate": 0.00010696519041287765,
+ "loss": 1.2659,
+ "step": 1432
+ },
+ {
+ "epoch": 1.4334031446344284,
+ "grad_norm": 0.6854751110076904,
+ "learning_rate": 0.0001068606166070712,
+ "loss": 1.4407,
+ "step": 1433
+ },
+ {
+ "epoch": 1.4344034259635523,
+ "grad_norm": 0.6936755180358887,
+ "learning_rate": 0.00010675603526268785,
+ "loss": 1.4219,
+ "step": 1434
+ },
+ {
+ "epoch": 1.435403707292676,
+ "grad_norm": 0.8174700736999512,
+ "learning_rate": 0.00010665144649464356,
+ "loss": 1.397,
+ "step": 1435
+ },
+ {
+ "epoch": 1.4364039886217999,
+ "grad_norm": 0.7231045365333557,
+ "learning_rate": 0.00010654685041786249,
+ "loss": 1.4558,
+ "step": 1436
+ },
+ {
+ "epoch": 1.4374042699509237,
+ "grad_norm": 0.6431431174278259,
+ "learning_rate": 0.00010644224714727681,
+ "loss": 1.3522,
+ "step": 1437
+ },
+ {
+ "epoch": 1.4384045512800476,
+ "grad_norm": 0.7025414109230042,
+ "learning_rate": 0.0001063376367978266,
+ "loss": 1.2723,
+ "step": 1438
+ },
+ {
+ "epoch": 1.4394048326091713,
+ "grad_norm": 0.6382789611816406,
+ "learning_rate": 0.00010623301948445971,
+ "loss": 1.4065,
+ "step": 1439
+ },
+ {
+ "epoch": 1.4404051139382952,
+ "grad_norm": 0.7055328488349915,
+ "learning_rate": 0.00010612839532213164,
+ "loss": 1.6186,
+ "step": 1440
+ },
+ {
+ "epoch": 1.4414053952674188,
+ "grad_norm": 0.6769623160362244,
+ "learning_rate": 0.00010602376442580544,
+ "loss": 1.49,
+ "step": 1441
+ },
+ {
+ "epoch": 1.4424056765965427,
+ "grad_norm": 0.6875097751617432,
+ "learning_rate": 0.00010591912691045152,
+ "loss": 1.3063,
+ "step": 1442
+ },
+ {
+ "epoch": 1.4434059579256666,
+ "grad_norm": 0.7747283577919006,
+ "learning_rate": 0.00010581448289104758,
+ "loss": 1.67,
+ "step": 1443
+ },
+ {
+ "epoch": 1.4444062392547905,
+ "grad_norm": 0.7236614227294922,
+ "learning_rate": 0.00010570983248257853,
+ "loss": 1.4703,
+ "step": 1444
+ },
+ {
+ "epoch": 1.4454065205839142,
+ "grad_norm": 0.7141956686973572,
+ "learning_rate": 0.00010560517580003617,
+ "loss": 1.5828,
+ "step": 1445
+ },
+ {
+ "epoch": 1.446406801913038,
+ "grad_norm": 0.679790198802948,
+ "learning_rate": 0.00010550051295841931,
+ "loss": 1.4706,
+ "step": 1446
+ },
+ {
+ "epoch": 1.447407083242162,
+ "grad_norm": 0.7930448651313782,
+ "learning_rate": 0.00010539584407273349,
+ "loss": 1.5388,
+ "step": 1447
+ },
+ {
+ "epoch": 1.4484073645712856,
+ "grad_norm": 0.7099994421005249,
+ "learning_rate": 0.00010529116925799085,
+ "loss": 1.408,
+ "step": 1448
+ },
+ {
+ "epoch": 1.4494076459004095,
+ "grad_norm": 0.6459046602249146,
+ "learning_rate": 0.00010518648862921012,
+ "loss": 1.3263,
+ "step": 1449
+ },
+ {
+ "epoch": 1.4504079272295334,
+ "grad_norm": 0.761446475982666,
+ "learning_rate": 0.00010508180230141635,
+ "loss": 1.3166,
+ "step": 1450
+ },
+ {
+ "epoch": 1.451408208558657,
+ "grad_norm": 0.6198295950889587,
+ "learning_rate": 0.00010497711038964086,
+ "loss": 1.3191,
+ "step": 1451
+ },
+ {
+ "epoch": 1.452408489887781,
+ "grad_norm": 0.6751184463500977,
+ "learning_rate": 0.0001048724130089212,
+ "loss": 1.1869,
+ "step": 1452
+ },
+ {
+ "epoch": 1.4534087712169048,
+ "grad_norm": 0.6853645443916321,
+ "learning_rate": 0.00010476771027430086,
+ "loss": 1.2579,
+ "step": 1453
+ },
+ {
+ "epoch": 1.4544090525460285,
+ "grad_norm": 0.6402629017829895,
+ "learning_rate": 0.00010466300230082911,
+ "loss": 1.3192,
+ "step": 1454
+ },
+ {
+ "epoch": 1.4554093338751524,
+ "grad_norm": 0.7862108945846558,
+ "learning_rate": 0.00010455828920356115,
+ "loss": 1.3619,
+ "step": 1455
+ },
+ {
+ "epoch": 1.4564096152042763,
+ "grad_norm": 0.7008057832717896,
+ "learning_rate": 0.00010445357109755771,
+ "loss": 1.3524,
+ "step": 1456
+ },
+ {
+ "epoch": 1.4574098965334001,
+ "grad_norm": 0.6477895379066467,
+ "learning_rate": 0.00010434884809788508,
+ "loss": 1.6172,
+ "step": 1457
+ },
+ {
+ "epoch": 1.4584101778625238,
+ "grad_norm": 0.6312345862388611,
+ "learning_rate": 0.00010424412031961484,
+ "loss": 1.2121,
+ "step": 1458
+ },
+ {
+ "epoch": 1.4594104591916477,
+ "grad_norm": 0.6922104358673096,
+ "learning_rate": 0.00010413938787782394,
+ "loss": 1.3243,
+ "step": 1459
+ },
+ {
+ "epoch": 1.4604107405207714,
+ "grad_norm": 0.670599102973938,
+ "learning_rate": 0.00010403465088759437,
+ "loss": 1.346,
+ "step": 1460
+ },
+ {
+ "epoch": 1.4614110218498952,
+ "grad_norm": 0.7351789474487305,
+ "learning_rate": 0.00010392990946401313,
+ "loss": 1.5343,
+ "step": 1461
+ },
+ {
+ "epoch": 1.4624113031790191,
+ "grad_norm": 0.6756190061569214,
+ "learning_rate": 0.00010382516372217215,
+ "loss": 1.4118,
+ "step": 1462
+ },
+ {
+ "epoch": 1.463411584508143,
+ "grad_norm": 0.6219121217727661,
+ "learning_rate": 0.000103720413777168,
+ "loss": 1.3602,
+ "step": 1463
+ },
+ {
+ "epoch": 1.4644118658372667,
+ "grad_norm": 0.6602663993835449,
+ "learning_rate": 0.00010361565974410192,
+ "loss": 1.4921,
+ "step": 1464
+ },
+ {
+ "epoch": 1.4654121471663906,
+ "grad_norm": 0.6103453636169434,
+ "learning_rate": 0.00010351090173807969,
+ "loss": 1.3259,
+ "step": 1465
+ },
+ {
+ "epoch": 1.4664124284955142,
+ "grad_norm": 0.745473325252533,
+ "learning_rate": 0.00010340613987421137,
+ "loss": 1.6036,
+ "step": 1466
+ },
+ {
+ "epoch": 1.4674127098246381,
+ "grad_norm": 0.6537976861000061,
+ "learning_rate": 0.00010330137426761135,
+ "loss": 1.3511,
+ "step": 1467
+ },
+ {
+ "epoch": 1.468412991153762,
+ "grad_norm": 0.7108463048934937,
+ "learning_rate": 0.00010319660503339808,
+ "loss": 1.4814,
+ "step": 1468
+ },
+ {
+ "epoch": 1.469413272482886,
+ "grad_norm": 0.6372820734977722,
+ "learning_rate": 0.00010309183228669397,
+ "loss": 1.468,
+ "step": 1469
+ },
+ {
+ "epoch": 1.4704135538120096,
+ "grad_norm": 0.6098326444625854,
+ "learning_rate": 0.00010298705614262532,
+ "loss": 1.6763,
+ "step": 1470
+ },
+ {
+ "epoch": 1.4714138351411334,
+ "grad_norm": 0.6385009288787842,
+ "learning_rate": 0.0001028822767163222,
+ "loss": 1.3058,
+ "step": 1471
+ },
+ {
+ "epoch": 1.4724141164702573,
+ "grad_norm": 0.6848032474517822,
+ "learning_rate": 0.00010277749412291824,
+ "loss": 1.4115,
+ "step": 1472
+ },
+ {
+ "epoch": 1.473414397799381,
+ "grad_norm": 0.7532572746276855,
+ "learning_rate": 0.00010267270847755048,
+ "loss": 1.4219,
+ "step": 1473
+ },
+ {
+ "epoch": 1.4744146791285049,
+ "grad_norm": 0.7336605787277222,
+ "learning_rate": 0.00010256791989535952,
+ "loss": 1.4092,
+ "step": 1474
+ },
+ {
+ "epoch": 1.4754149604576288,
+ "grad_norm": 0.6300507187843323,
+ "learning_rate": 0.00010246312849148899,
+ "loss": 1.2911,
+ "step": 1475
+ },
+ {
+ "epoch": 1.4764152417867527,
+ "grad_norm": 0.7114218473434448,
+ "learning_rate": 0.00010235833438108571,
+ "loss": 1.5038,
+ "step": 1476
+ },
+ {
+ "epoch": 1.4774155231158763,
+ "grad_norm": 0.7215398550033569,
+ "learning_rate": 0.00010225353767929944,
+ "loss": 1.4919,
+ "step": 1477
+ },
+ {
+ "epoch": 1.4784158044450002,
+ "grad_norm": 0.6189507246017456,
+ "learning_rate": 0.00010214873850128282,
+ "loss": 1.2092,
+ "step": 1478
+ },
+ {
+ "epoch": 1.4794160857741239,
+ "grad_norm": 0.5806283950805664,
+ "learning_rate": 0.00010204393696219117,
+ "loss": 1.2862,
+ "step": 1479
+ },
+ {
+ "epoch": 1.4804163671032478,
+ "grad_norm": 0.7068900465965271,
+ "learning_rate": 0.00010193913317718244,
+ "loss": 1.319,
+ "step": 1480
+ },
+ {
+ "epoch": 1.4814166484323716,
+ "grad_norm": 0.749792218208313,
+ "learning_rate": 0.00010183432726141706,
+ "loss": 1.3661,
+ "step": 1481
+ },
+ {
+ "epoch": 1.4824169297614955,
+ "grad_norm": 0.7314055562019348,
+ "learning_rate": 0.00010172951933005775,
+ "loss": 1.5695,
+ "step": 1482
+ },
+ {
+ "epoch": 1.4834172110906192,
+ "grad_norm": 0.6871920228004456,
+ "learning_rate": 0.00010162470949826948,
+ "loss": 1.3598,
+ "step": 1483
+ },
+ {
+ "epoch": 1.484417492419743,
+ "grad_norm": 0.7139384150505066,
+ "learning_rate": 0.0001015198978812193,
+ "loss": 1.4942,
+ "step": 1484
+ },
+ {
+ "epoch": 1.4854177737488667,
+ "grad_norm": 0.6459400653839111,
+ "learning_rate": 0.00010141508459407623,
+ "loss": 1.3971,
+ "step": 1485
+ },
+ {
+ "epoch": 1.4864180550779906,
+ "grad_norm": 0.8157202005386353,
+ "learning_rate": 0.0001013102697520111,
+ "loss": 1.4679,
+ "step": 1486
+ },
+ {
+ "epoch": 1.4874183364071145,
+ "grad_norm": 0.6978387832641602,
+ "learning_rate": 0.00010120545347019647,
+ "loss": 1.4547,
+ "step": 1487
+ },
+ {
+ "epoch": 1.4884186177362384,
+ "grad_norm": 0.641835629940033,
+ "learning_rate": 0.00010110063586380646,
+ "loss": 1.6611,
+ "step": 1488
+ },
+ {
+ "epoch": 1.489418899065362,
+ "grad_norm": 0.723709225654602,
+ "learning_rate": 0.00010099581704801673,
+ "loss": 1.3994,
+ "step": 1489
+ },
+ {
+ "epoch": 1.490419180394486,
+ "grad_norm": 0.6613619327545166,
+ "learning_rate": 0.00010089099713800414,
+ "loss": 1.5722,
+ "step": 1490
+ },
+ {
+ "epoch": 1.4914194617236098,
+ "grad_norm": 0.6406750082969666,
+ "learning_rate": 0.00010078617624894684,
+ "loss": 1.312,
+ "step": 1491
+ },
+ {
+ "epoch": 1.4924197430527335,
+ "grad_norm": 0.5216225385665894,
+ "learning_rate": 0.000100681354496024,
+ "loss": 1.3552,
+ "step": 1492
+ },
+ {
+ "epoch": 1.4934200243818574,
+ "grad_norm": 0.7549086809158325,
+ "learning_rate": 0.00010057653199441581,
+ "loss": 1.4344,
+ "step": 1493
+ },
+ {
+ "epoch": 1.4944203057109813,
+ "grad_norm": 0.6958007216453552,
+ "learning_rate": 0.00010047170885930324,
+ "loss": 1.254,
+ "step": 1494
+ },
+ {
+ "epoch": 1.4954205870401052,
+ "grad_norm": 0.706564724445343,
+ "learning_rate": 0.00010036688520586788,
+ "loss": 1.4854,
+ "step": 1495
+ },
+ {
+ "epoch": 1.4964208683692288,
+ "grad_norm": 0.6802704930305481,
+ "learning_rate": 0.00010026206114929209,
+ "loss": 1.4631,
+ "step": 1496
+ },
+ {
+ "epoch": 1.4974211496983527,
+ "grad_norm": 0.645449697971344,
+ "learning_rate": 0.00010015723680475846,
+ "loss": 1.5165,
+ "step": 1497
+ },
+ {
+ "epoch": 1.4984214310274764,
+ "grad_norm": 0.5729085206985474,
+ "learning_rate": 0.00010005241228745004,
+ "loss": 1.2683,
+ "step": 1498
+ },
+ {
+ "epoch": 1.4994217123566003,
+ "grad_norm": 0.6592169404029846,
+ "learning_rate": 9.994758771254997e-05,
+ "loss": 1.4722,
+ "step": 1499
+ },
+ {
+ "epoch": 1.5004219936857242,
+ "grad_norm": 0.6299737691879272,
+ "learning_rate": 9.984276319524154e-05,
+ "loss": 1.3664,
+ "step": 1500
+ }
+ ],
+ "logging_steps": 1,
+ "max_steps": 2997,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 3,
+ "save_steps": 500,
+ "stateful_callbacks": {
+ "TrainerControl": {
+ "args": {
+ "should_epoch_stop": false,
+ "should_evaluate": false,
+ "should_log": false,
+ "should_save": true,
+ "should_training_stop": false
+ },
+ "attributes": {}
+ }
+ },
+ "total_flos": 1.2154019925393408e+16,
+ "train_batch_size": 2,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/checkpoint-1500/training_args.bin b/checkpoint-1500/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..6831a6d623a8a2b84942bb5584c6aa5bc14eee51
--- /dev/null
+++ b/checkpoint-1500/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5db131d6e82df60077bab037ec35113e1b0836a0bd72bb0a21e3fc0311a527de
+size 5304
diff --git a/checkpoint-2000/config.json b/checkpoint-2000/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..49d4bd1e1961ef7daf9af9a7dbe539789d58d949
--- /dev/null
+++ b/checkpoint-2000/config.json
@@ -0,0 +1,35 @@
+{
+ "_name_or_path": "facebook/nllb-200-distilled-600M",
+ "activation_dropout": 0.0,
+ "activation_function": "relu",
+ "architectures": [
+ "M2M100ForConditionalGeneration"
+ ],
+ "attention_dropout": 0.1,
+ "bos_token_id": 0,
+ "d_model": 1024,
+ "decoder_attention_heads": 16,
+ "decoder_ffn_dim": 4096,
+ "decoder_layerdrop": 0,
+ "decoder_layers": 12,
+ "decoder_start_token_id": 2,
+ "dropout": 0.1,
+ "encoder_attention_heads": 16,
+ "encoder_ffn_dim": 4096,
+ "encoder_layerdrop": 0,
+ "encoder_layers": 12,
+ "eos_token_id": 2,
+ "init_std": 0.02,
+ "is_encoder_decoder": true,
+ "max_length": 200,
+ "max_position_embeddings": 1024,
+ "model_type": "m2m_100",
+ "num_hidden_layers": 12,
+ "pad_token_id": 1,
+ "scale_embedding": true,
+ "tokenizer_class": "NllbTokenizer",
+ "torch_dtype": "float32",
+ "transformers_version": "4.43.1",
+ "use_cache": true,
+ "vocab_size": 256206
+}
diff --git a/checkpoint-2000/generation_config.json b/checkpoint-2000/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..680d3e0504023804deeb427766576194a0f17d47
--- /dev/null
+++ b/checkpoint-2000/generation_config.json
@@ -0,0 +1,9 @@
+{
+ "_from_model_config": true,
+ "bos_token_id": 0,
+ "decoder_start_token_id": 2,
+ "eos_token_id": 2,
+ "max_length": 200,
+ "pad_token_id": 1,
+ "transformers_version": "4.43.1"
+}
diff --git a/checkpoint-2000/model.safetensors b/checkpoint-2000/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..f07547b296d07e668fad0729062d7bbb1fdb3695
--- /dev/null
+++ b/checkpoint-2000/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f5c35a5aa3ca01db6281ce61a334d16dc3678b011cae3ff0f716dc1a7fb645fd
+size 2460354912
diff --git a/checkpoint-2000/optimizer.pt b/checkpoint-2000/optimizer.pt
new file mode 100644
index 0000000000000000000000000000000000000000..f55e459ff832740d512a748765d501133f38ce78
--- /dev/null
+++ b/checkpoint-2000/optimizer.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6c14448f5da9b2a3f180bf40b75218c0ff91ee94ed126139ca9101e06f5d5d4e
+size 5125261
diff --git a/checkpoint-2000/rng_state.pth b/checkpoint-2000/rng_state.pth
new file mode 100644
index 0000000000000000000000000000000000000000..79d89cf474fa45ca2eb34b1a4fe45b51940ae645
--- /dev/null
+++ b/checkpoint-2000/rng_state.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8ab4089e08f1bd79b075ea9fc207bd9650fac6b19774bca44f3c9ce34adfc6b1
+size 14244
diff --git a/checkpoint-2000/scheduler.pt b/checkpoint-2000/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..b51f382f38c14543c5d9f760498ad98ccd2a5791
--- /dev/null
+++ b/checkpoint-2000/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:af2c02e1aee6f43146c60a10904b177fae4453e7ff3b69f22b17189b83f41a87
+size 1064
diff --git a/checkpoint-2000/sentencepiece.bpe.model b/checkpoint-2000/sentencepiece.bpe.model
new file mode 100644
index 0000000000000000000000000000000000000000..dc2262d3e1d375b235eb71c24119c8e73f85d4ad
--- /dev/null
+++ b/checkpoint-2000/sentencepiece.bpe.model
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:14bb8dfb35c0ffdea7bc01e56cea38b9e3d5efcdcb9c251d6b40538e1aab555a
+size 4852054
diff --git a/checkpoint-2000/special_tokens_map.json b/checkpoint-2000/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..770c6f4e25faf27bbc3878b806f2ecfb88c5169e
--- /dev/null
+++ b/checkpoint-2000/special_tokens_map.json
@@ -0,0 +1,255 @@
+{
+ "additional_special_tokens": [
+ "ace_Arab",
+ "ace_Latn",
+ "acm_Arab",
+ "acq_Arab",
+ "aeb_Arab",
+ "afr_Latn",
+ "ajp_Arab",
+ "aka_Latn",
+ "amh_Ethi",
+ "apc_Arab",
+ "arb_Arab",
+ "ars_Arab",
+ "ary_Arab",
+ "arz_Arab",
+ "asm_Beng",
+ "ast_Latn",
+ "awa_Deva",
+ "ayr_Latn",
+ "azb_Arab",
+ "azj_Latn",
+ "bak_Cyrl",
+ "bam_Latn",
+ "ban_Latn",
+ "bel_Cyrl",
+ "bem_Latn",
+ "ben_Beng",
+ "bho_Deva",
+ "bjn_Arab",
+ "bjn_Latn",
+ "bod_Tibt",
+ "bos_Latn",
+ "bug_Latn",
+ "bul_Cyrl",
+ "cat_Latn",
+ "ceb_Latn",
+ "ces_Latn",
+ "cjk_Latn",
+ "ckb_Arab",
+ "crh_Latn",
+ "cym_Latn",
+ "dan_Latn",
+ "deu_Latn",
+ "dik_Latn",
+ "dyu_Latn",
+ "dzo_Tibt",
+ "ell_Grek",
+ "eng_Latn",
+ "epo_Latn",
+ "est_Latn",
+ "eus_Latn",
+ "ewe_Latn",
+ "fao_Latn",
+ "pes_Arab",
+ "fij_Latn",
+ "fin_Latn",
+ "fon_Latn",
+ "fra_Latn",
+ "fur_Latn",
+ "fuv_Latn",
+ "gla_Latn",
+ "gle_Latn",
+ "glg_Latn",
+ "grn_Latn",
+ "guj_Gujr",
+ "hat_Latn",
+ "hau_Latn",
+ "heb_Hebr",
+ "hin_Deva",
+ "hne_Deva",
+ "hrv_Latn",
+ "hun_Latn",
+ "hye_Armn",
+ "ibo_Latn",
+ "ilo_Latn",
+ "ind_Latn",
+ "isl_Latn",
+ "ita_Latn",
+ "jav_Latn",
+ "jpn_Jpan",
+ "kab_Latn",
+ "kac_Latn",
+ "kam_Latn",
+ "kan_Knda",
+ "kas_Arab",
+ "kas_Deva",
+ "kat_Geor",
+ "knc_Arab",
+ "knc_Latn",
+ "kaz_Cyrl",
+ "kbp_Latn",
+ "kea_Latn",
+ "khm_Khmr",
+ "kik_Latn",
+ "kin_Latn",
+ "kir_Cyrl",
+ "kmb_Latn",
+ "kon_Latn",
+ "kor_Hang",
+ "kmr_Latn",
+ "lao_Laoo",
+ "lvs_Latn",
+ "lij_Latn",
+ "lim_Latn",
+ "lin_Latn",
+ "lit_Latn",
+ "lmo_Latn",
+ "ltg_Latn",
+ "ltz_Latn",
+ "lua_Latn",
+ "lug_Latn",
+ "luo_Latn",
+ "lus_Latn",
+ "mag_Deva",
+ "mai_Deva",
+ "mal_Mlym",
+ "mar_Deva",
+ "min_Latn",
+ "mkd_Cyrl",
+ "plt_Latn",
+ "mlt_Latn",
+ "mni_Beng",
+ "khk_Cyrl",
+ "mos_Latn",
+ "mri_Latn",
+ "zsm_Latn",
+ "mya_Mymr",
+ "nld_Latn",
+ "nno_Latn",
+ "nob_Latn",
+ "npi_Deva",
+ "nso_Latn",
+ "nus_Latn",
+ "nya_Latn",
+ "oci_Latn",
+ "gaz_Latn",
+ "ory_Orya",
+ "pag_Latn",
+ "pan_Guru",
+ "pap_Latn",
+ "pol_Latn",
+ "por_Latn",
+ "prs_Arab",
+ "pbt_Arab",
+ "quy_Latn",
+ "ron_Latn",
+ "run_Latn",
+ "rus_Cyrl",
+ "sag_Latn",
+ "san_Deva",
+ "sat_Beng",
+ "scn_Latn",
+ "shn_Mymr",
+ "sin_Sinh",
+ "slk_Latn",
+ "slv_Latn",
+ "smo_Latn",
+ "sna_Latn",
+ "snd_Arab",
+ "som_Latn",
+ "sot_Latn",
+ "spa_Latn",
+ "als_Latn",
+ "srd_Latn",
+ "srp_Cyrl",
+ "ssw_Latn",
+ "sun_Latn",
+ "swe_Latn",
+ "swh_Latn",
+ "szl_Latn",
+ "tam_Taml",
+ "tat_Cyrl",
+ "tel_Telu",
+ "tgk_Cyrl",
+ "tgl_Latn",
+ "tha_Thai",
+ "tir_Ethi",
+ "taq_Latn",
+ "taq_Tfng",
+ "tpi_Latn",
+ "tsn_Latn",
+ "tso_Latn",
+ "tuk_Latn",
+ "tum_Latn",
+ "tur_Latn",
+ "twi_Latn",
+ "tzm_Tfng",
+ "uig_Arab",
+ "ukr_Cyrl",
+ "umb_Latn",
+ "urd_Arab",
+ "uzn_Latn",
+ "vec_Latn",
+ "vie_Latn",
+ "war_Latn",
+ "wol_Latn",
+ "xho_Latn",
+ "ydd_Hebr",
+ "yor_Latn",
+ "yue_Hant",
+ "zho_Hans",
+ "zho_Hant",
+ "zul_Latn"
+ ],
+ "bos_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "cls_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "mask_token": {
+ "content": "",
+ "lstrip": true,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "sep_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/checkpoint-2000/tokenizer.json b/checkpoint-2000/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..98050e98b98364c06d83b3f41864076220cb8408
--- /dev/null
+++ b/checkpoint-2000/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3b39b25b0763a1dd69dec54081fafcf10770d9f2538a3bd975a0c4be6d60a9c2
+size 17331294
diff --git a/checkpoint-2000/tokenizer_config.json b/checkpoint-2000/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f1424d3657c008568198b44be241646482e7e9f2
--- /dev/null
+++ b/checkpoint-2000/tokenizer_config.json
@@ -0,0 +1,1878 @@
+{
+ "added_tokens_decoder": {
+ "0": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "3": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256001": {
+ "content": "ace_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256002": {
+ "content": "ace_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256003": {
+ "content": "acm_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256004": {
+ "content": "acq_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256005": {
+ "content": "aeb_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256006": {
+ "content": "afr_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256007": {
+ "content": "ajp_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256008": {
+ "content": "aka_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256009": {
+ "content": "amh_Ethi",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256010": {
+ "content": "apc_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256011": {
+ "content": "arb_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256012": {
+ "content": "ars_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256013": {
+ "content": "ary_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256014": {
+ "content": "arz_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256015": {
+ "content": "asm_Beng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256016": {
+ "content": "ast_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256017": {
+ "content": "awa_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256018": {
+ "content": "ayr_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256019": {
+ "content": "azb_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256020": {
+ "content": "azj_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256021": {
+ "content": "bak_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256022": {
+ "content": "bam_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256023": {
+ "content": "ban_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256024": {
+ "content": "bel_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256025": {
+ "content": "bem_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256026": {
+ "content": "ben_Beng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256027": {
+ "content": "bho_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256028": {
+ "content": "bjn_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256029": {
+ "content": "bjn_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256030": {
+ "content": "bod_Tibt",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256031": {
+ "content": "bos_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256032": {
+ "content": "bug_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256033": {
+ "content": "bul_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256034": {
+ "content": "cat_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256035": {
+ "content": "ceb_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256036": {
+ "content": "ces_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256037": {
+ "content": "cjk_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256038": {
+ "content": "ckb_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256039": {
+ "content": "crh_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256040": {
+ "content": "cym_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256041": {
+ "content": "dan_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256042": {
+ "content": "deu_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256043": {
+ "content": "dik_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256044": {
+ "content": "dyu_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256045": {
+ "content": "dzo_Tibt",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256046": {
+ "content": "ell_Grek",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256047": {
+ "content": "eng_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256048": {
+ "content": "epo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256049": {
+ "content": "est_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256050": {
+ "content": "eus_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256051": {
+ "content": "ewe_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256052": {
+ "content": "fao_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256053": {
+ "content": "pes_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256054": {
+ "content": "fij_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256055": {
+ "content": "fin_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256056": {
+ "content": "fon_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256057": {
+ "content": "fra_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256058": {
+ "content": "fur_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256059": {
+ "content": "fuv_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256060": {
+ "content": "gla_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256061": {
+ "content": "gle_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256062": {
+ "content": "glg_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256063": {
+ "content": "grn_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256064": {
+ "content": "guj_Gujr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256065": {
+ "content": "hat_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256066": {
+ "content": "hau_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256067": {
+ "content": "heb_Hebr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256068": {
+ "content": "hin_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256069": {
+ "content": "hne_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256070": {
+ "content": "hrv_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256071": {
+ "content": "hun_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256072": {
+ "content": "hye_Armn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256073": {
+ "content": "ibo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256074": {
+ "content": "ilo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256075": {
+ "content": "ind_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256076": {
+ "content": "isl_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256077": {
+ "content": "ita_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256078": {
+ "content": "jav_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256079": {
+ "content": "jpn_Jpan",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256080": {
+ "content": "kab_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256081": {
+ "content": "kac_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256082": {
+ "content": "kam_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256083": {
+ "content": "kan_Knda",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256084": {
+ "content": "kas_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256085": {
+ "content": "kas_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256086": {
+ "content": "kat_Geor",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256087": {
+ "content": "knc_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256088": {
+ "content": "knc_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256089": {
+ "content": "kaz_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256090": {
+ "content": "kbp_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256091": {
+ "content": "kea_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256092": {
+ "content": "khm_Khmr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256093": {
+ "content": "kik_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256094": {
+ "content": "kin_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256095": {
+ "content": "kir_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256096": {
+ "content": "kmb_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256097": {
+ "content": "kon_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256098": {
+ "content": "kor_Hang",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256099": {
+ "content": "kmr_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256100": {
+ "content": "lao_Laoo",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256101": {
+ "content": "lvs_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256102": {
+ "content": "lij_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256103": {
+ "content": "lim_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256104": {
+ "content": "lin_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256105": {
+ "content": "lit_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256106": {
+ "content": "lmo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256107": {
+ "content": "ltg_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256108": {
+ "content": "ltz_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256109": {
+ "content": "lua_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256110": {
+ "content": "lug_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256111": {
+ "content": "luo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256112": {
+ "content": "lus_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256113": {
+ "content": "mag_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256114": {
+ "content": "mai_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256115": {
+ "content": "mal_Mlym",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256116": {
+ "content": "mar_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256117": {
+ "content": "min_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256118": {
+ "content": "mkd_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256119": {
+ "content": "plt_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256120": {
+ "content": "mlt_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256121": {
+ "content": "mni_Beng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256122": {
+ "content": "khk_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256123": {
+ "content": "mos_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256124": {
+ "content": "mri_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256125": {
+ "content": "zsm_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256126": {
+ "content": "mya_Mymr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256127": {
+ "content": "nld_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256128": {
+ "content": "nno_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256129": {
+ "content": "nob_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256130": {
+ "content": "npi_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256131": {
+ "content": "nso_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256132": {
+ "content": "nus_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256133": {
+ "content": "nya_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256134": {
+ "content": "oci_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256135": {
+ "content": "gaz_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256136": {
+ "content": "ory_Orya",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256137": {
+ "content": "pag_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256138": {
+ "content": "pan_Guru",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256139": {
+ "content": "pap_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256140": {
+ "content": "pol_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256141": {
+ "content": "por_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256142": {
+ "content": "prs_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256143": {
+ "content": "pbt_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256144": {
+ "content": "quy_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256145": {
+ "content": "ron_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256146": {
+ "content": "run_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256147": {
+ "content": "rus_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256148": {
+ "content": "sag_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256149": {
+ "content": "san_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256150": {
+ "content": "sat_Beng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256151": {
+ "content": "scn_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256152": {
+ "content": "shn_Mymr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256153": {
+ "content": "sin_Sinh",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256154": {
+ "content": "slk_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256155": {
+ "content": "slv_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256156": {
+ "content": "smo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256157": {
+ "content": "sna_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256158": {
+ "content": "snd_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256159": {
+ "content": "som_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256160": {
+ "content": "sot_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256161": {
+ "content": "spa_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256162": {
+ "content": "als_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256163": {
+ "content": "srd_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256164": {
+ "content": "srp_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256165": {
+ "content": "ssw_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256166": {
+ "content": "sun_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256167": {
+ "content": "swe_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256168": {
+ "content": "swh_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256169": {
+ "content": "szl_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256170": {
+ "content": "tam_Taml",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256171": {
+ "content": "tat_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256172": {
+ "content": "tel_Telu",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256173": {
+ "content": "tgk_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256174": {
+ "content": "tgl_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256175": {
+ "content": "tha_Thai",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256176": {
+ "content": "tir_Ethi",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256177": {
+ "content": "taq_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256178": {
+ "content": "taq_Tfng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256179": {
+ "content": "tpi_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256180": {
+ "content": "tsn_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256181": {
+ "content": "tso_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256182": {
+ "content": "tuk_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256183": {
+ "content": "tum_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256184": {
+ "content": "tur_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256185": {
+ "content": "twi_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256186": {
+ "content": "tzm_Tfng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256187": {
+ "content": "uig_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256188": {
+ "content": "ukr_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256189": {
+ "content": "umb_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256190": {
+ "content": "urd_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256191": {
+ "content": "uzn_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256192": {
+ "content": "vec_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256193": {
+ "content": "vie_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256194": {
+ "content": "war_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256195": {
+ "content": "wol_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256196": {
+ "content": "xho_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256197": {
+ "content": "ydd_Hebr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256198": {
+ "content": "yor_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256199": {
+ "content": "yue_Hant",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256200": {
+ "content": "zho_Hans",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256201": {
+ "content": "zho_Hant",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256202": {
+ "content": "zul_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256203": {
+ "content": "<mask>",
+ "lstrip": true,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "additional_special_tokens": [
+ "ace_Arab",
+ "ace_Latn",
+ "acm_Arab",
+ "acq_Arab",
+ "aeb_Arab",
+ "afr_Latn",
+ "ajp_Arab",
+ "aka_Latn",
+ "amh_Ethi",
+ "apc_Arab",
+ "arb_Arab",
+ "ars_Arab",
+ "ary_Arab",
+ "arz_Arab",
+ "asm_Beng",
+ "ast_Latn",
+ "awa_Deva",
+ "ayr_Latn",
+ "azb_Arab",
+ "azj_Latn",
+ "bak_Cyrl",
+ "bam_Latn",
+ "ban_Latn",
+ "bel_Cyrl",
+ "bem_Latn",
+ "ben_Beng",
+ "bho_Deva",
+ "bjn_Arab",
+ "bjn_Latn",
+ "bod_Tibt",
+ "bos_Latn",
+ "bug_Latn",
+ "bul_Cyrl",
+ "cat_Latn",
+ "ceb_Latn",
+ "ces_Latn",
+ "cjk_Latn",
+ "ckb_Arab",
+ "crh_Latn",
+ "cym_Latn",
+ "dan_Latn",
+ "deu_Latn",
+ "dik_Latn",
+ "dyu_Latn",
+ "dzo_Tibt",
+ "ell_Grek",
+ "eng_Latn",
+ "epo_Latn",
+ "est_Latn",
+ "eus_Latn",
+ "ewe_Latn",
+ "fao_Latn",
+ "pes_Arab",
+ "fij_Latn",
+ "fin_Latn",
+ "fon_Latn",
+ "fra_Latn",
+ "fur_Latn",
+ "fuv_Latn",
+ "gla_Latn",
+ "gle_Latn",
+ "glg_Latn",
+ "grn_Latn",
+ "guj_Gujr",
+ "hat_Latn",
+ "hau_Latn",
+ "heb_Hebr",
+ "hin_Deva",
+ "hne_Deva",
+ "hrv_Latn",
+ "hun_Latn",
+ "hye_Armn",
+ "ibo_Latn",
+ "ilo_Latn",
+ "ind_Latn",
+ "isl_Latn",
+ "ita_Latn",
+ "jav_Latn",
+ "jpn_Jpan",
+ "kab_Latn",
+ "kac_Latn",
+ "kam_Latn",
+ "kan_Knda",
+ "kas_Arab",
+ "kas_Deva",
+ "kat_Geor",
+ "knc_Arab",
+ "knc_Latn",
+ "kaz_Cyrl",
+ "kbp_Latn",
+ "kea_Latn",
+ "khm_Khmr",
+ "kik_Latn",
+ "kin_Latn",
+ "kir_Cyrl",
+ "kmb_Latn",
+ "kon_Latn",
+ "kor_Hang",
+ "kmr_Latn",
+ "lao_Laoo",
+ "lvs_Latn",
+ "lij_Latn",
+ "lim_Latn",
+ "lin_Latn",
+ "lit_Latn",
+ "lmo_Latn",
+ "ltg_Latn",
+ "ltz_Latn",
+ "lua_Latn",
+ "lug_Latn",
+ "luo_Latn",
+ "lus_Latn",
+ "mag_Deva",
+ "mai_Deva",
+ "mal_Mlym",
+ "mar_Deva",
+ "min_Latn",
+ "mkd_Cyrl",
+ "plt_Latn",
+ "mlt_Latn",
+ "mni_Beng",
+ "khk_Cyrl",
+ "mos_Latn",
+ "mri_Latn",
+ "zsm_Latn",
+ "mya_Mymr",
+ "nld_Latn",
+ "nno_Latn",
+ "nob_Latn",
+ "npi_Deva",
+ "nso_Latn",
+ "nus_Latn",
+ "nya_Latn",
+ "oci_Latn",
+ "gaz_Latn",
+ "ory_Orya",
+ "pag_Latn",
+ "pan_Guru",
+ "pap_Latn",
+ "pol_Latn",
+ "por_Latn",
+ "prs_Arab",
+ "pbt_Arab",
+ "quy_Latn",
+ "ron_Latn",
+ "run_Latn",
+ "rus_Cyrl",
+ "sag_Latn",
+ "san_Deva",
+ "sat_Beng",
+ "scn_Latn",
+ "shn_Mymr",
+ "sin_Sinh",
+ "slk_Latn",
+ "slv_Latn",
+ "smo_Latn",
+ "sna_Latn",
+ "snd_Arab",
+ "som_Latn",
+ "sot_Latn",
+ "spa_Latn",
+ "als_Latn",
+ "srd_Latn",
+ "srp_Cyrl",
+ "ssw_Latn",
+ "sun_Latn",
+ "swe_Latn",
+ "swh_Latn",
+ "szl_Latn",
+ "tam_Taml",
+ "tat_Cyrl",
+ "tel_Telu",
+ "tgk_Cyrl",
+ "tgl_Latn",
+ "tha_Thai",
+ "tir_Ethi",
+ "taq_Latn",
+ "taq_Tfng",
+ "tpi_Latn",
+ "tsn_Latn",
+ "tso_Latn",
+ "tuk_Latn",
+ "tum_Latn",
+ "tur_Latn",
+ "twi_Latn",
+ "tzm_Tfng",
+ "uig_Arab",
+ "ukr_Cyrl",
+ "umb_Latn",
+ "urd_Arab",
+ "uzn_Latn",
+ "vec_Latn",
+ "vie_Latn",
+ "war_Latn",
+ "wol_Latn",
+ "xho_Latn",
+ "ydd_Hebr",
+ "yor_Latn",
+ "yue_Hant",
+ "zho_Hans",
+ "zho_Hant",
+ "zul_Latn"
+ ],
+ "bos_token": "<s>",
+ "clean_up_tokenization_spaces": true,
+ "cls_token": "<s>",
+ "eos_token": "</s>",
+ "legacy_behaviour": false,
+ "mask_token": "<mask>",
+ "model_max_length": 1024,
+ "pad_token": "<pad>",
+ "sep_token": "</s>",
+ "sp_model_kwargs": {},
+ "src_lang": "eng_Latn",
+ "tgt_lang": null,
+ "tokenizer_class": "NllbTokenizer",
+ "unk_token": "<unk>"
+}
diff --git a/checkpoint-2000/trainer_state.json b/checkpoint-2000/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..57340f88d009534bc8c61629a3879b470c11dcb0
--- /dev/null
+++ b/checkpoint-2000/trainer_state.json
@@ -0,0 +1,14033 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 2.0005626582476324,
+ "eval_steps": 500,
+ "global_step": 2000,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.001000281329123816,
+ "grad_norm": 7.503077030181885,
+ "learning_rate": 0.0001999999450590425,
+ "loss": 3.6513,
+ "step": 1
+ },
+ {
+ "epoch": 0.002000562658247632,
+ "grad_norm": 3.1191189289093018,
+ "learning_rate": 0.00019999978023623033,
+ "loss": 2.8683,
+ "step": 2
+ },
+ {
+ "epoch": 0.003000843987371448,
+ "grad_norm": 1.9282511472702026,
+ "learning_rate": 0.0001999995055317446,
+ "loss": 2.7882,
+ "step": 3
+ },
+ {
+ "epoch": 0.004001125316495264,
+ "grad_norm": 1.726026177406311,
+ "learning_rate": 0.00019999912094588717,
+ "loss": 2.7716,
+ "step": 4
+ },
+ {
+ "epoch": 0.005001406645619081,
+ "grad_norm": 1.4632996320724487,
+ "learning_rate": 0.00019999862647908064,
+ "loss": 2.6869,
+ "step": 5
+ },
+ {
+ "epoch": 0.006001687974742896,
+ "grad_norm": 1.5544543266296387,
+ "learning_rate": 0.00019999802213186834,
+ "loss": 2.6952,
+ "step": 6
+ },
+ {
+ "epoch": 0.007001969303866712,
+ "grad_norm": 1.5888980627059937,
+ "learning_rate": 0.0001999973079049143,
+ "loss": 2.1237,
+ "step": 7
+ },
+ {
+ "epoch": 0.008002250632990529,
+ "grad_norm": 1.8750641345977783,
+ "learning_rate": 0.00019999648379900338,
+ "loss": 2.3376,
+ "step": 8
+ },
+ {
+ "epoch": 0.009002531962114344,
+ "grad_norm": 1.0540648698806763,
+ "learning_rate": 0.0001999955498150411,
+ "loss": 2.4896,
+ "step": 9
+ },
+ {
+ "epoch": 0.010002813291238161,
+ "grad_norm": 1.0269274711608887,
+ "learning_rate": 0.00019999450595405374,
+ "loss": 2.1365,
+ "step": 10
+ },
+ {
+ "epoch": 0.011003094620361977,
+ "grad_norm": 1.0851730108261108,
+ "learning_rate": 0.0001999933522171883,
+ "loss": 2.235,
+ "step": 11
+ },
+ {
+ "epoch": 0.012003375949485792,
+ "grad_norm": 0.927042543888092,
+ "learning_rate": 0.00019999208860571255,
+ "loss": 2.2438,
+ "step": 12
+ },
+ {
+ "epoch": 0.01300365727860961,
+ "grad_norm": 1.3729208707809448,
+ "learning_rate": 0.00019999071512101496,
+ "loss": 2.0845,
+ "step": 13
+ },
+ {
+ "epoch": 0.014003938607733425,
+ "grad_norm": 1.1325910091400146,
+ "learning_rate": 0.00019998923176460474,
+ "loss": 2.0668,
+ "step": 14
+ },
+ {
+ "epoch": 0.01500421993685724,
+ "grad_norm": 0.9290457367897034,
+ "learning_rate": 0.00019998763853811184,
+ "loss": 2.0227,
+ "step": 15
+ },
+ {
+ "epoch": 0.016004501265981057,
+ "grad_norm": 0.942140519618988,
+ "learning_rate": 0.00019998593544328692,
+ "loss": 2.1598,
+ "step": 16
+ },
+ {
+ "epoch": 0.017004782595104875,
+ "grad_norm": 1.096635103225708,
+ "learning_rate": 0.00019998412248200138,
+ "loss": 2.1897,
+ "step": 17
+ },
+ {
+ "epoch": 0.01800506392422869,
+ "grad_norm": 1.1107186079025269,
+ "learning_rate": 0.00019998219965624734,
+ "loss": 2.0546,
+ "step": 18
+ },
+ {
+ "epoch": 0.019005345253352506,
+ "grad_norm": 0.9696593880653381,
+ "learning_rate": 0.0001999801669681376,
+ "loss": 2.0317,
+ "step": 19
+ },
+ {
+ "epoch": 0.020005626582476323,
+ "grad_norm": 0.9394300580024719,
+ "learning_rate": 0.00019997802441990573,
+ "loss": 2.2883,
+ "step": 20
+ },
+ {
+ "epoch": 0.021005907911600136,
+ "grad_norm": 1.08865225315094,
+ "learning_rate": 0.00019997577201390606,
+ "loss": 1.9838,
+ "step": 21
+ },
+ {
+ "epoch": 0.022006189240723954,
+ "grad_norm": 1.0712405443191528,
+ "learning_rate": 0.00019997340975261353,
+ "loss": 2.1177,
+ "step": 22
+ },
+ {
+ "epoch": 0.02300647056984777,
+ "grad_norm": 1.3190314769744873,
+ "learning_rate": 0.00019997093763862383,
+ "loss": 1.9755,
+ "step": 23
+ },
+ {
+ "epoch": 0.024006751898971584,
+ "grad_norm": 1.0659812688827515,
+ "learning_rate": 0.0001999683556746534,
+ "loss": 1.9829,
+ "step": 24
+ },
+ {
+ "epoch": 0.0250070332280954,
+ "grad_norm": 1.1824345588684082,
+ "learning_rate": 0.0001999656638635393,
+ "loss": 2.4219,
+ "step": 25
+ },
+ {
+ "epoch": 0.02600731455721922,
+ "grad_norm": 1.3446214199066162,
+ "learning_rate": 0.0001999628622082394,
+ "loss": 1.9644,
+ "step": 26
+ },
+ {
+ "epoch": 0.027007595886343033,
+ "grad_norm": 1.2527475357055664,
+ "learning_rate": 0.0001999599507118322,
+ "loss": 2.1889,
+ "step": 27
+ },
+ {
+ "epoch": 0.02800787721546685,
+ "grad_norm": 1.4738999605178833,
+ "learning_rate": 0.00019995692937751683,
+ "loss": 2.1949,
+ "step": 28
+ },
+ {
+ "epoch": 0.029008158544590667,
+ "grad_norm": 1.0533576011657715,
+ "learning_rate": 0.0001999537982086133,
+ "loss": 2.1034,
+ "step": 29
+ },
+ {
+ "epoch": 0.03000843987371448,
+ "grad_norm": 1.0343223810195923,
+ "learning_rate": 0.00019995055720856218,
+ "loss": 1.9561,
+ "step": 30
+ },
+ {
+ "epoch": 0.031008721202838298,
+ "grad_norm": 1.1149976253509521,
+ "learning_rate": 0.00019994720638092468,
+ "loss": 2.0981,
+ "step": 31
+ },
+ {
+ "epoch": 0.032009002531962115,
+ "grad_norm": 1.197178840637207,
+ "learning_rate": 0.00019994374572938277,
+ "loss": 2.1587,
+ "step": 32
+ },
+ {
+ "epoch": 0.03300928386108593,
+ "grad_norm": 0.9382303953170776,
+ "learning_rate": 0.00019994017525773913,
+ "loss": 1.869,
+ "step": 33
+ },
+ {
+ "epoch": 0.03400956519020975,
+ "grad_norm": 1.0526461601257324,
+ "learning_rate": 0.00019993649496991705,
+ "loss": 1.9045,
+ "step": 34
+ },
+ {
+ "epoch": 0.03500984651933356,
+ "grad_norm": 0.8510498404502869,
+ "learning_rate": 0.00019993270486996046,
+ "loss": 2.1005,
+ "step": 35
+ },
+ {
+ "epoch": 0.03601012784845738,
+ "grad_norm": 0.9990401268005371,
+ "learning_rate": 0.000199928804962034,
+ "loss": 1.8569,
+ "step": 36
+ },
+ {
+ "epoch": 0.037010409177581194,
+ "grad_norm": 0.9243854284286499,
+ "learning_rate": 0.00019992479525042303,
+ "loss": 1.9666,
+ "step": 37
+ },
+ {
+ "epoch": 0.03801069050670501,
+ "grad_norm": 0.7774227261543274,
+ "learning_rate": 0.00019992067573953342,
+ "loss": 2.0376,
+ "step": 38
+ },
+ {
+ "epoch": 0.03901097183582883,
+ "grad_norm": 0.8114833235740662,
+ "learning_rate": 0.0001999164464338918,
+ "loss": 2.1608,
+ "step": 39
+ },
+ {
+ "epoch": 0.040011253164952645,
+ "grad_norm": 0.8716320395469666,
+ "learning_rate": 0.0001999121073381454,
+ "loss": 2.0743,
+ "step": 40
+ },
+ {
+ "epoch": 0.041011534494076456,
+ "grad_norm": 0.9571239948272705,
+ "learning_rate": 0.0001999076584570621,
+ "loss": 2.0128,
+ "step": 41
+ },
+ {
+ "epoch": 0.04201181582320027,
+ "grad_norm": 1.038691520690918,
+ "learning_rate": 0.00019990309979553045,
+ "loss": 1.976,
+ "step": 42
+ },
+ {
+ "epoch": 0.04301209715232409,
+ "grad_norm": 1.0576292276382446,
+ "learning_rate": 0.00019989843135855958,
+ "loss": 1.94,
+ "step": 43
+ },
+ {
+ "epoch": 0.04401237848144791,
+ "grad_norm": 1.0991204977035522,
+ "learning_rate": 0.00019989365315127922,
+ "loss": 1.9397,
+ "step": 44
+ },
+ {
+ "epoch": 0.045012659810571724,
+ "grad_norm": 0.9268686175346375,
+ "learning_rate": 0.0001998887651789398,
+ "loss": 1.9305,
+ "step": 45
+ },
+ {
+ "epoch": 0.04601294113969554,
+ "grad_norm": 0.8459104299545288,
+ "learning_rate": 0.0001998837674469123,
+ "loss": 1.7941,
+ "step": 46
+ },
+ {
+ "epoch": 0.04701322246881936,
+ "grad_norm": 0.9260527491569519,
+ "learning_rate": 0.00019987865996068833,
+ "loss": 1.8843,
+ "step": 47
+ },
+ {
+ "epoch": 0.04801350379794317,
+ "grad_norm": 0.8370497226715088,
+ "learning_rate": 0.00019987344272588006,
+ "loss": 1.8779,
+ "step": 48
+ },
+ {
+ "epoch": 0.049013785127066986,
+ "grad_norm": 0.9228008389472961,
+ "learning_rate": 0.00019986811574822033,
+ "loss": 2.1713,
+ "step": 49
+ },
+ {
+ "epoch": 0.0500140664561908,
+ "grad_norm": 1.013746738433838,
+ "learning_rate": 0.00019986267903356254,
+ "loss": 2.1443,
+ "step": 50
+ },
+ {
+ "epoch": 0.05101434778531462,
+ "grad_norm": 1.0155737400054932,
+ "learning_rate": 0.0001998571325878806,
+ "loss": 1.9679,
+ "step": 51
+ },
+ {
+ "epoch": 0.05201462911443844,
+ "grad_norm": 0.9591345191001892,
+ "learning_rate": 0.0001998514764172691,
+ "loss": 2.0611,
+ "step": 52
+ },
+ {
+ "epoch": 0.053014910443562255,
+ "grad_norm": 0.9030050039291382,
+ "learning_rate": 0.00019984571052794313,
+ "loss": 1.9698,
+ "step": 53
+ },
+ {
+ "epoch": 0.054015191772686065,
+ "grad_norm": 0.7697799205780029,
+ "learning_rate": 0.00019983983492623833,
+ "loss": 2.0609,
+ "step": 54
+ },
+ {
+ "epoch": 0.05501547310180988,
+ "grad_norm": 0.8806005716323853,
+ "learning_rate": 0.00019983384961861096,
+ "loss": 1.9756,
+ "step": 55
+ },
+ {
+ "epoch": 0.0560157544309337,
+ "grad_norm": 0.9424449801445007,
+ "learning_rate": 0.0001998277546116378,
+ "loss": 2.0913,
+ "step": 56
+ },
+ {
+ "epoch": 0.05701603576005752,
+ "grad_norm": 1.139495849609375,
+ "learning_rate": 0.00019982154991201608,
+ "loss": 2.2524,
+ "step": 57
+ },
+ {
+ "epoch": 0.058016317089181334,
+ "grad_norm": 1.094347357749939,
+ "learning_rate": 0.00019981523552656377,
+ "loss": 1.8501,
+ "step": 58
+ },
+ {
+ "epoch": 0.05901659841830515,
+ "grad_norm": 1.1519278287887573,
+ "learning_rate": 0.00019980881146221914,
+ "loss": 1.9866,
+ "step": 59
+ },
+ {
+ "epoch": 0.06001687974742896,
+ "grad_norm": 1.2018250226974487,
+ "learning_rate": 0.00019980227772604112,
+ "loss": 1.8226,
+ "step": 60
+ },
+ {
+ "epoch": 0.06101716107655278,
+ "grad_norm": 0.9565753936767578,
+ "learning_rate": 0.0001997956343252091,
+ "loss": 1.8434,
+ "step": 61
+ },
+ {
+ "epoch": 0.062017442405676595,
+ "grad_norm": 1.0832768678665161,
+ "learning_rate": 0.00019978888126702296,
+ "loss": 2.1271,
+ "step": 62
+ },
+ {
+ "epoch": 0.06301772373480041,
+ "grad_norm": 0.8973837494850159,
+ "learning_rate": 0.00019978201855890308,
+ "loss": 1.8331,
+ "step": 63
+ },
+ {
+ "epoch": 0.06401800506392423,
+ "grad_norm": 0.8754604458808899,
+ "learning_rate": 0.00019977504620839035,
+ "loss": 2.1379,
+ "step": 64
+ },
+ {
+ "epoch": 0.06501828639304805,
+ "grad_norm": 0.8244839310646057,
+ "learning_rate": 0.00019976796422314615,
+ "loss": 1.8431,
+ "step": 65
+ },
+ {
+ "epoch": 0.06601856772217186,
+ "grad_norm": 0.8213551044464111,
+ "learning_rate": 0.00019976077261095226,
+ "loss": 1.9155,
+ "step": 66
+ },
+ {
+ "epoch": 0.06701884905129568,
+ "grad_norm": 0.9140985608100891,
+ "learning_rate": 0.00019975347137971098,
+ "loss": 2.0651,
+ "step": 67
+ },
+ {
+ "epoch": 0.0680191303804195,
+ "grad_norm": 0.8518921732902527,
+ "learning_rate": 0.00019974606053744503,
+ "loss": 1.8197,
+ "step": 68
+ },
+ {
+ "epoch": 0.06901941170954332,
+ "grad_norm": 0.8397145867347717,
+ "learning_rate": 0.00019973854009229763,
+ "loss": 1.8621,
+ "step": 69
+ },
+ {
+ "epoch": 0.07001969303866712,
+ "grad_norm": 0.8727964162826538,
+ "learning_rate": 0.00019973091005253232,
+ "loss": 1.762,
+ "step": 70
+ },
+ {
+ "epoch": 0.07101997436779094,
+ "grad_norm": 0.9284623265266418,
+ "learning_rate": 0.0001997231704265332,
+ "loss": 1.8675,
+ "step": 71
+ },
+ {
+ "epoch": 0.07202025569691475,
+ "grad_norm": 0.8280015587806702,
+ "learning_rate": 0.00019971532122280464,
+ "loss": 1.931,
+ "step": 72
+ },
+ {
+ "epoch": 0.07302053702603857,
+ "grad_norm": 0.7591394186019897,
+ "learning_rate": 0.0001997073624499716,
+ "loss": 1.8485,
+ "step": 73
+ },
+ {
+ "epoch": 0.07402081835516239,
+ "grad_norm": 0.975128710269928,
+ "learning_rate": 0.0001996992941167792,
+ "loss": 2.0784,
+ "step": 74
+ },
+ {
+ "epoch": 0.0750210996842862,
+ "grad_norm": 0.8034948110580444,
+ "learning_rate": 0.00019969111623209323,
+ "loss": 1.9849,
+ "step": 75
+ },
+ {
+ "epoch": 0.07602138101341002,
+ "grad_norm": 0.8540483713150024,
+ "learning_rate": 0.00019968282880489957,
+ "loss": 1.7832,
+ "step": 76
+ },
+ {
+ "epoch": 0.07702166234253384,
+ "grad_norm": 0.8181695342063904,
+ "learning_rate": 0.00019967443184430467,
+ "loss": 1.944,
+ "step": 77
+ },
+ {
+ "epoch": 0.07802194367165766,
+ "grad_norm": 0.8446747064590454,
+ "learning_rate": 0.0001996659253595353,
+ "loss": 1.8508,
+ "step": 78
+ },
+ {
+ "epoch": 0.07902222500078147,
+ "grad_norm": 0.8280364871025085,
+ "learning_rate": 0.0001996573093599385,
+ "loss": 1.843,
+ "step": 79
+ },
+ {
+ "epoch": 0.08002250632990529,
+ "grad_norm": 0.8016006350517273,
+ "learning_rate": 0.00019964858385498172,
+ "loss": 1.9368,
+ "step": 80
+ },
+ {
+ "epoch": 0.08102278765902911,
+ "grad_norm": 0.8450536131858826,
+ "learning_rate": 0.00019963974885425266,
+ "loss": 1.9736,
+ "step": 81
+ },
+ {
+ "epoch": 0.08202306898815291,
+ "grad_norm": 0.9172171950340271,
+ "learning_rate": 0.00019963080436745945,
+ "loss": 1.9382,
+ "step": 82
+ },
+ {
+ "epoch": 0.08302335031727673,
+ "grad_norm": 0.8581916689872742,
+ "learning_rate": 0.00019962175040443044,
+ "loss": 2.224,
+ "step": 83
+ },
+ {
+ "epoch": 0.08402363164640055,
+ "grad_norm": 0.9350367188453674,
+ "learning_rate": 0.0001996125869751143,
+ "loss": 2.0519,
+ "step": 84
+ },
+ {
+ "epoch": 0.08502391297552436,
+ "grad_norm": 0.9276247620582581,
+ "learning_rate": 0.00019960331408957997,
+ "loss": 1.9657,
+ "step": 85
+ },
+ {
+ "epoch": 0.08602419430464818,
+ "grad_norm": 0.871574342250824,
+ "learning_rate": 0.00019959393175801671,
+ "loss": 1.9399,
+ "step": 86
+ },
+ {
+ "epoch": 0.087024475633772,
+ "grad_norm": 1.0662888288497925,
+ "learning_rate": 0.00019958443999073397,
+ "loss": 1.9089,
+ "step": 87
+ },
+ {
+ "epoch": 0.08802475696289581,
+ "grad_norm": 0.8258713483810425,
+ "learning_rate": 0.00019957483879816151,
+ "loss": 1.839,
+ "step": 88
+ },
+ {
+ "epoch": 0.08902503829201963,
+ "grad_norm": 0.8154664039611816,
+ "learning_rate": 0.00019956512819084928,
+ "loss": 1.8409,
+ "step": 89
+ },
+ {
+ "epoch": 0.09002531962114345,
+ "grad_norm": 0.8584638833999634,
+ "learning_rate": 0.00019955530817946748,
+ "loss": 1.9521,
+ "step": 90
+ },
+ {
+ "epoch": 0.09102560095026727,
+ "grad_norm": 0.7917523384094238,
+ "learning_rate": 0.00019954537877480655,
+ "loss": 1.9495,
+ "step": 91
+ },
+ {
+ "epoch": 0.09202588227939108,
+ "grad_norm": 1.0129039287567139,
+ "learning_rate": 0.00019953533998777706,
+ "loss": 1.949,
+ "step": 92
+ },
+ {
+ "epoch": 0.0930261636085149,
+ "grad_norm": 0.8677986264228821,
+ "learning_rate": 0.00019952519182940993,
+ "loss": 1.7875,
+ "step": 93
+ },
+ {
+ "epoch": 0.09402644493763872,
+ "grad_norm": 0.8848614692687988,
+ "learning_rate": 0.00019951493431085603,
+ "loss": 2.0675,
+ "step": 94
+ },
+ {
+ "epoch": 0.09502672626676252,
+ "grad_norm": 0.9936463832855225,
+ "learning_rate": 0.00019950456744338658,
+ "loss": 1.6761,
+ "step": 95
+ },
+ {
+ "epoch": 0.09602700759588634,
+ "grad_norm": 1.0520148277282715,
+ "learning_rate": 0.00019949409123839288,
+ "loss": 2.081,
+ "step": 96
+ },
+ {
+ "epoch": 0.09702728892501016,
+ "grad_norm": 0.8061773180961609,
+ "learning_rate": 0.00019948350570738642,
+ "loss": 1.7281,
+ "step": 97
+ },
+ {
+ "epoch": 0.09802757025413397,
+ "grad_norm": 0.7642756104469299,
+ "learning_rate": 0.0001994728108619987,
+ "loss": 2.0032,
+ "step": 98
+ },
+ {
+ "epoch": 0.09902785158325779,
+ "grad_norm": 0.8541550040245056,
+ "learning_rate": 0.0001994620067139815,
+ "loss": 2.1136,
+ "step": 99
+ },
+ {
+ "epoch": 0.1000281329123816,
+ "grad_norm": 0.7868679761886597,
+ "learning_rate": 0.00019945109327520658,
+ "loss": 1.8695,
+ "step": 100
+ },
+ {
+ "epoch": 0.10102841424150542,
+ "grad_norm": 0.8776901364326477,
+ "learning_rate": 0.00019944007055766586,
+ "loss": 1.9786,
+ "step": 101
+ },
+ {
+ "epoch": 0.10202869557062924,
+ "grad_norm": 0.9013833999633789,
+ "learning_rate": 0.00019942893857347128,
+ "loss": 2.1466,
+ "step": 102
+ },
+ {
+ "epoch": 0.10302897689975306,
+ "grad_norm": 0.957558274269104,
+ "learning_rate": 0.00019941769733485494,
+ "loss": 2.0473,
+ "step": 103
+ },
+ {
+ "epoch": 0.10402925822887688,
+ "grad_norm": 0.8921108841896057,
+ "learning_rate": 0.00019940634685416888,
+ "loss": 1.7882,
+ "step": 104
+ },
+ {
+ "epoch": 0.10502953955800069,
+ "grad_norm": 0.896019697189331,
+ "learning_rate": 0.00019939488714388524,
+ "loss": 1.8811,
+ "step": 105
+ },
+ {
+ "epoch": 0.10602982088712451,
+ "grad_norm": 0.8792067766189575,
+ "learning_rate": 0.00019938331821659614,
+ "loss": 1.8624,
+ "step": 106
+ },
+ {
+ "epoch": 0.10703010221624833,
+ "grad_norm": 0.8739930391311646,
+ "learning_rate": 0.0001993716400850138,
+ "loss": 1.8105,
+ "step": 107
+ },
+ {
+ "epoch": 0.10803038354537213,
+ "grad_norm": 0.7678424715995789,
+ "learning_rate": 0.0001993598527619703,
+ "loss": 1.8772,
+ "step": 108
+ },
+ {
+ "epoch": 0.10903066487449595,
+ "grad_norm": 0.8718745112419128,
+ "learning_rate": 0.00019934795626041783,
+ "loss": 1.8236,
+ "step": 109
+ },
+ {
+ "epoch": 0.11003094620361976,
+ "grad_norm": 0.8467247486114502,
+ "learning_rate": 0.0001993359505934285,
+ "loss": 1.8188,
+ "step": 110
+ },
+ {
+ "epoch": 0.11103122753274358,
+ "grad_norm": 0.8685783743858337,
+ "learning_rate": 0.00019932383577419432,
+ "loss": 2.0775,
+ "step": 111
+ },
+ {
+ "epoch": 0.1120315088618674,
+ "grad_norm": 0.7799698710441589,
+ "learning_rate": 0.0001993116118160273,
+ "loss": 1.6489,
+ "step": 112
+ },
+ {
+ "epoch": 0.11303179019099122,
+ "grad_norm": 0.7900094389915466,
+ "learning_rate": 0.00019929927873235938,
+ "loss": 1.8332,
+ "step": 113
+ },
+ {
+ "epoch": 0.11403207152011503,
+ "grad_norm": 0.9433258771896362,
+ "learning_rate": 0.00019928683653674237,
+ "loss": 1.9331,
+ "step": 114
+ },
+ {
+ "epoch": 0.11503235284923885,
+ "grad_norm": 0.8861056566238403,
+ "learning_rate": 0.00019927428524284805,
+ "loss": 1.9135,
+ "step": 115
+ },
+ {
+ "epoch": 0.11603263417836267,
+ "grad_norm": 0.8566756844520569,
+ "learning_rate": 0.00019926162486446792,
+ "loss": 1.9874,
+ "step": 116
+ },
+ {
+ "epoch": 0.11703291550748648,
+ "grad_norm": 0.6897929310798645,
+ "learning_rate": 0.0001992488554155135,
+ "loss": 1.946,
+ "step": 117
+ },
+ {
+ "epoch": 0.1180331968366103,
+ "grad_norm": 0.7807729244232178,
+ "learning_rate": 0.00019923597691001615,
+ "loss": 1.8127,
+ "step": 118
+ },
+ {
+ "epoch": 0.11903347816573412,
+ "grad_norm": 0.7572523355484009,
+ "learning_rate": 0.0001992229893621269,
+ "loss": 1.7768,
+ "step": 119
+ },
+ {
+ "epoch": 0.12003375949485792,
+ "grad_norm": 0.7393172979354858,
+ "learning_rate": 0.00019920989278611687,
+ "loss": 1.894,
+ "step": 120
+ },
+ {
+ "epoch": 0.12103404082398174,
+ "grad_norm": 0.866576611995697,
+ "learning_rate": 0.0001991966871963767,
+ "loss": 1.9285,
+ "step": 121
+ },
+ {
+ "epoch": 0.12203432215310556,
+ "grad_norm": 0.7326533794403076,
+ "learning_rate": 0.000199183372607417,
+ "loss": 1.9309,
+ "step": 122
+ },
+ {
+ "epoch": 0.12303460348222937,
+ "grad_norm": 0.7655537724494934,
+ "learning_rate": 0.0001991699490338681,
+ "loss": 2.1145,
+ "step": 123
+ },
+ {
+ "epoch": 0.12403488481135319,
+ "grad_norm": 0.9714633226394653,
+ "learning_rate": 0.00019915641649048005,
+ "loss": 2.0341,
+ "step": 124
+ },
+ {
+ "epoch": 0.12503516614047702,
+ "grad_norm": 0.8542420864105225,
+ "learning_rate": 0.0001991427749921227,
+ "loss": 2.1426,
+ "step": 125
+ },
+ {
+ "epoch": 0.12603544746960083,
+ "grad_norm": 0.8286274671554565,
+ "learning_rate": 0.00019912902455378556,
+ "loss": 1.8452,
+ "step": 126
+ },
+ {
+ "epoch": 0.12703572879872463,
+ "grad_norm": 0.8823768496513367,
+ "learning_rate": 0.00019911516519057788,
+ "loss": 1.8651,
+ "step": 127
+ },
+ {
+ "epoch": 0.12803601012784846,
+ "grad_norm": 0.7301567196846008,
+ "learning_rate": 0.00019910119691772863,
+ "loss": 1.7776,
+ "step": 128
+ },
+ {
+ "epoch": 0.12903629145697226,
+ "grad_norm": 0.8402552604675293,
+ "learning_rate": 0.00019908711975058637,
+ "loss": 1.8617,
+ "step": 129
+ },
+ {
+ "epoch": 0.1300365727860961,
+ "grad_norm": 0.814500093460083,
+ "learning_rate": 0.0001990729337046194,
+ "loss": 1.9156,
+ "step": 130
+ },
+ {
+ "epoch": 0.1310368541152199,
+ "grad_norm": 0.8262699246406555,
+ "learning_rate": 0.0001990586387954156,
+ "loss": 1.8659,
+ "step": 131
+ },
+ {
+ "epoch": 0.13203713544434373,
+ "grad_norm": 0.8846324682235718,
+ "learning_rate": 0.00019904423503868247,
+ "loss": 2.043,
+ "step": 132
+ },
+ {
+ "epoch": 0.13303741677346753,
+ "grad_norm": 0.8757227659225464,
+ "learning_rate": 0.00019902972245024715,
+ "loss": 1.9217,
+ "step": 133
+ },
+ {
+ "epoch": 0.13403769810259136,
+ "grad_norm": 0.8476879596710205,
+ "learning_rate": 0.00019901510104605637,
+ "loss": 1.8892,
+ "step": 134
+ },
+ {
+ "epoch": 0.13503797943171517,
+ "grad_norm": 0.7707583904266357,
+ "learning_rate": 0.00019900037084217637,
+ "loss": 1.787,
+ "step": 135
+ },
+ {
+ "epoch": 0.136038260760839,
+ "grad_norm": 0.7389562129974365,
+ "learning_rate": 0.00019898553185479303,
+ "loss": 1.5854,
+ "step": 136
+ },
+ {
+ "epoch": 0.1370385420899628,
+ "grad_norm": 0.7331375479698181,
+ "learning_rate": 0.00019897058410021167,
+ "loss": 1.997,
+ "step": 137
+ },
+ {
+ "epoch": 0.13803882341908663,
+ "grad_norm": 0.7219388484954834,
+ "learning_rate": 0.00019895552759485722,
+ "loss": 1.8337,
+ "step": 138
+ },
+ {
+ "epoch": 0.13903910474821043,
+ "grad_norm": 0.8535702228546143,
+ "learning_rate": 0.00019894036235527395,
+ "loss": 1.7818,
+ "step": 139
+ },
+ {
+ "epoch": 0.14003938607733424,
+ "grad_norm": 0.7627841830253601,
+ "learning_rate": 0.00019892508839812584,
+ "loss": 1.8173,
+ "step": 140
+ },
+ {
+ "epoch": 0.14103966740645807,
+ "grad_norm": 0.8397619724273682,
+ "learning_rate": 0.00019890970574019617,
+ "loss": 1.8735,
+ "step": 141
+ },
+ {
+ "epoch": 0.14203994873558187,
+ "grad_norm": 0.8093482851982117,
+ "learning_rate": 0.00019889421439838763,
+ "loss": 1.9918,
+ "step": 142
+ },
+ {
+ "epoch": 0.1430402300647057,
+ "grad_norm": 0.8853684067726135,
+ "learning_rate": 0.00019887861438972246,
+ "loss": 1.5825,
+ "step": 143
+ },
+ {
+ "epoch": 0.1440405113938295,
+ "grad_norm": 0.7413788437843323,
+ "learning_rate": 0.00019886290573134228,
+ "loss": 1.9068,
+ "step": 144
+ },
+ {
+ "epoch": 0.14504079272295334,
+ "grad_norm": 0.7924477458000183,
+ "learning_rate": 0.000198847088440508,
+ "loss": 1.8582,
+ "step": 145
+ },
+ {
+ "epoch": 0.14604107405207714,
+ "grad_norm": 0.8679131865501404,
+ "learning_rate": 0.0001988311625346,
+ "loss": 1.7104,
+ "step": 146
+ },
+ {
+ "epoch": 0.14704135538120097,
+ "grad_norm": 0.7480150461196899,
+ "learning_rate": 0.00019881512803111796,
+ "loss": 1.7288,
+ "step": 147
+ },
+ {
+ "epoch": 0.14804163671032478,
+ "grad_norm": 0.8382390737533569,
+ "learning_rate": 0.00019879898494768093,
+ "loss": 1.8004,
+ "step": 148
+ },
+ {
+ "epoch": 0.1490419180394486,
+ "grad_norm": 0.7360037565231323,
+ "learning_rate": 0.00019878273330202717,
+ "loss": 1.85,
+ "step": 149
+ },
+ {
+ "epoch": 0.1500421993685724,
+ "grad_norm": 0.9644019603729248,
+ "learning_rate": 0.00019876637311201433,
+ "loss": 2.065,
+ "step": 150
+ },
+ {
+ "epoch": 0.15104248069769624,
+ "grad_norm": 0.8116248250007629,
+ "learning_rate": 0.00019874990439561934,
+ "loss": 1.702,
+ "step": 151
+ },
+ {
+ "epoch": 0.15204276202682004,
+ "grad_norm": 0.9301722645759583,
+ "learning_rate": 0.0001987333271709383,
+ "loss": 1.8089,
+ "step": 152
+ },
+ {
+ "epoch": 0.15304304335594385,
+ "grad_norm": 0.7991555333137512,
+ "learning_rate": 0.00019871664145618657,
+ "loss": 1.8227,
+ "step": 153
+ },
+ {
+ "epoch": 0.15404332468506768,
+ "grad_norm": 0.8676092028617859,
+ "learning_rate": 0.00019869984726969878,
+ "loss": 1.7253,
+ "step": 154
+ },
+ {
+ "epoch": 0.15504360601419148,
+ "grad_norm": 0.8022972941398621,
+ "learning_rate": 0.00019868294462992866,
+ "loss": 1.8766,
+ "step": 155
+ },
+ {
+ "epoch": 0.1560438873433153,
+ "grad_norm": 1.128886103630066,
+ "learning_rate": 0.00019866593355544922,
+ "loss": 2.0197,
+ "step": 156
+ },
+ {
+ "epoch": 0.15704416867243912,
+ "grad_norm": 0.7420483827590942,
+ "learning_rate": 0.00019864881406495246,
+ "loss": 1.8825,
+ "step": 157
+ },
+ {
+ "epoch": 0.15804445000156295,
+ "grad_norm": 0.7797536849975586,
+ "learning_rate": 0.00019863158617724967,
+ "loss": 1.8892,
+ "step": 158
+ },
+ {
+ "epoch": 0.15904473133068675,
+ "grad_norm": 0.6859965324401855,
+ "learning_rate": 0.00019861424991127115,
+ "loss": 1.8424,
+ "step": 159
+ },
+ {
+ "epoch": 0.16004501265981058,
+ "grad_norm": 0.8115108609199524,
+ "learning_rate": 0.00019859680528606637,
+ "loss": 1.8394,
+ "step": 160
+ },
+ {
+ "epoch": 0.16104529398893438,
+ "grad_norm": 0.9756322503089905,
+ "learning_rate": 0.00019857925232080373,
+ "loss": 1.726,
+ "step": 161
+ },
+ {
+ "epoch": 0.16204557531805822,
+ "grad_norm": 0.8894350528717041,
+ "learning_rate": 0.00019856159103477086,
+ "loss": 1.8893,
+ "step": 162
+ },
+ {
+ "epoch": 0.16304585664718202,
+ "grad_norm": 0.8075819611549377,
+ "learning_rate": 0.00019854382144737426,
+ "loss": 1.6596,
+ "step": 163
+ },
+ {
+ "epoch": 0.16404613797630582,
+ "grad_norm": 0.8861923813819885,
+ "learning_rate": 0.00019852594357813952,
+ "loss": 1.9352,
+ "step": 164
+ },
+ {
+ "epoch": 0.16504641930542965,
+ "grad_norm": 0.8511936068534851,
+ "learning_rate": 0.00019850795744671116,
+ "loss": 1.9416,
+ "step": 165
+ },
+ {
+ "epoch": 0.16604670063455346,
+ "grad_norm": 0.9425658583641052,
+ "learning_rate": 0.0001984898630728527,
+ "loss": 1.9081,
+ "step": 166
+ },
+ {
+ "epoch": 0.1670469819636773,
+ "grad_norm": 0.7502055168151855,
+ "learning_rate": 0.0001984716604764466,
+ "loss": 1.703,
+ "step": 167
+ },
+ {
+ "epoch": 0.1680472632928011,
+ "grad_norm": 0.9135978817939758,
+ "learning_rate": 0.0001984533496774942,
+ "loss": 1.7641,
+ "step": 168
+ },
+ {
+ "epoch": 0.16904754462192492,
+ "grad_norm": 0.7768126726150513,
+ "learning_rate": 0.0001984349306961158,
+ "loss": 1.7053,
+ "step": 169
+ },
+ {
+ "epoch": 0.17004782595104873,
+ "grad_norm": 0.8106538653373718,
+ "learning_rate": 0.00019841640355255043,
+ "loss": 1.8646,
+ "step": 170
+ },
+ {
+ "epoch": 0.17104810728017256,
+ "grad_norm": 0.7872330546379089,
+ "learning_rate": 0.00019839776826715614,
+ "loss": 1.7814,
+ "step": 171
+ },
+ {
+ "epoch": 0.17204838860929636,
+ "grad_norm": 0.869532585144043,
+ "learning_rate": 0.00019837902486040978,
+ "loss": 1.7812,
+ "step": 172
+ },
+ {
+ "epoch": 0.1730486699384202,
+ "grad_norm": 1.015028715133667,
+ "learning_rate": 0.0001983601733529069,
+ "loss": 1.9432,
+ "step": 173
+ },
+ {
+ "epoch": 0.174048951267544,
+ "grad_norm": 0.800183117389679,
+ "learning_rate": 0.00019834121376536187,
+ "loss": 1.758,
+ "step": 174
+ },
+ {
+ "epoch": 0.17504923259666783,
+ "grad_norm": 0.7427104711532593,
+ "learning_rate": 0.00019832214611860793,
+ "loss": 1.6476,
+ "step": 175
+ },
+ {
+ "epoch": 0.17604951392579163,
+ "grad_norm": 0.8289130926132202,
+ "learning_rate": 0.00019830297043359692,
+ "loss": 1.7702,
+ "step": 176
+ },
+ {
+ "epoch": 0.17704979525491543,
+ "grad_norm": 0.8298771977424622,
+ "learning_rate": 0.00019828368673139947,
+ "loss": 1.7515,
+ "step": 177
+ },
+ {
+ "epoch": 0.17805007658403926,
+ "grad_norm": 0.7602815628051758,
+ "learning_rate": 0.0001982642950332049,
+ "loss": 1.7545,
+ "step": 178
+ },
+ {
+ "epoch": 0.17905035791316307,
+ "grad_norm": 0.8110321164131165,
+ "learning_rate": 0.00019824479536032112,
+ "loss": 2.2604,
+ "step": 179
+ },
+ {
+ "epoch": 0.1800506392422869,
+ "grad_norm": 0.882273256778717,
+ "learning_rate": 0.0001982251877341748,
+ "loss": 1.8133,
+ "step": 180
+ },
+ {
+ "epoch": 0.1810509205714107,
+ "grad_norm": 0.9015639424324036,
+ "learning_rate": 0.00019820547217631117,
+ "loss": 1.7282,
+ "step": 181
+ },
+ {
+ "epoch": 0.18205120190053453,
+ "grad_norm": 0.9231659173965454,
+ "learning_rate": 0.00019818564870839405,
+ "loss": 1.9094,
+ "step": 182
+ },
+ {
+ "epoch": 0.18305148322965833,
+ "grad_norm": 0.8110967874526978,
+ "learning_rate": 0.00019816571735220583,
+ "loss": 1.886,
+ "step": 183
+ },
+ {
+ "epoch": 0.18405176455878217,
+ "grad_norm": 0.7670036554336548,
+ "learning_rate": 0.00019814567812964748,
+ "loss": 1.9895,
+ "step": 184
+ },
+ {
+ "epoch": 0.18505204588790597,
+ "grad_norm": 0.7955975532531738,
+ "learning_rate": 0.00019812553106273847,
+ "loss": 1.8127,
+ "step": 185
+ },
+ {
+ "epoch": 0.1860523272170298,
+ "grad_norm": 0.8790062665939331,
+ "learning_rate": 0.00019810527617361681,
+ "loss": 1.8899,
+ "step": 186
+ },
+ {
+ "epoch": 0.1870526085461536,
+ "grad_norm": 0.8818586468696594,
+ "learning_rate": 0.00019808491348453894,
+ "loss": 1.7707,
+ "step": 187
+ },
+ {
+ "epoch": 0.18805288987527743,
+ "grad_norm": 0.746442437171936,
+ "learning_rate": 0.00019806444301787978,
+ "loss": 1.7281,
+ "step": 188
+ },
+ {
+ "epoch": 0.18905317120440124,
+ "grad_norm": 0.7786905169487,
+ "learning_rate": 0.0001980438647961327,
+ "loss": 1.7317,
+ "step": 189
+ },
+ {
+ "epoch": 0.19005345253352504,
+ "grad_norm": 0.9338862299919128,
+ "learning_rate": 0.00019802317884190935,
+ "loss": 1.9548,
+ "step": 190
+ },
+ {
+ "epoch": 0.19105373386264887,
+ "grad_norm": 0.7416581511497498,
+ "learning_rate": 0.00019800238517793996,
+ "loss": 1.8601,
+ "step": 191
+ },
+ {
+ "epoch": 0.19205401519177268,
+ "grad_norm": 0.6782898902893066,
+ "learning_rate": 0.00019798148382707296,
+ "loss": 1.8477,
+ "step": 192
+ },
+ {
+ "epoch": 0.1930542965208965,
+ "grad_norm": 0.7389237880706787,
+ "learning_rate": 0.00019796047481227515,
+ "loss": 1.7749,
+ "step": 193
+ },
+ {
+ "epoch": 0.1940545778500203,
+ "grad_norm": 0.9711095094680786,
+ "learning_rate": 0.00019793935815663163,
+ "loss": 2.0899,
+ "step": 194
+ },
+ {
+ "epoch": 0.19505485917914414,
+ "grad_norm": 0.7949391007423401,
+ "learning_rate": 0.00019791813388334581,
+ "loss": 1.8778,
+ "step": 195
+ },
+ {
+ "epoch": 0.19605514050826794,
+ "grad_norm": 0.8871057033538818,
+ "learning_rate": 0.00019789680201573933,
+ "loss": 1.7511,
+ "step": 196
+ },
+ {
+ "epoch": 0.19705542183739178,
+ "grad_norm": 0.8664624094963074,
+ "learning_rate": 0.00019787536257725202,
+ "loss": 1.7232,
+ "step": 197
+ },
+ {
+ "epoch": 0.19805570316651558,
+ "grad_norm": 0.871658980846405,
+ "learning_rate": 0.00019785381559144196,
+ "loss": 1.7987,
+ "step": 198
+ },
+ {
+ "epoch": 0.1990559844956394,
+ "grad_norm": 0.7748361229896545,
+ "learning_rate": 0.00019783216108198542,
+ "loss": 1.9239,
+ "step": 199
+ },
+ {
+ "epoch": 0.2000562658247632,
+ "grad_norm": 0.9393408298492432,
+ "learning_rate": 0.00019781039907267677,
+ "loss": 2.0936,
+ "step": 200
+ },
+ {
+ "epoch": 0.20105654715388704,
+ "grad_norm": 0.8519601225852966,
+ "learning_rate": 0.00019778852958742853,
+ "loss": 1.9108,
+ "step": 201
+ },
+ {
+ "epoch": 0.20205682848301085,
+ "grad_norm": 0.8464863300323486,
+ "learning_rate": 0.00019776655265027127,
+ "loss": 1.897,
+ "step": 202
+ },
+ {
+ "epoch": 0.20305710981213465,
+ "grad_norm": 0.8933351635932922,
+ "learning_rate": 0.00019774446828535371,
+ "loss": 1.8204,
+ "step": 203
+ },
+ {
+ "epoch": 0.20405739114125848,
+ "grad_norm": 0.8305785059928894,
+ "learning_rate": 0.00019772227651694256,
+ "loss": 1.9135,
+ "step": 204
+ },
+ {
+ "epoch": 0.20505767247038229,
+ "grad_norm": 0.8117037415504456,
+ "learning_rate": 0.00019769997736942258,
+ "loss": 1.7585,
+ "step": 205
+ },
+ {
+ "epoch": 0.20605795379950612,
+ "grad_norm": 0.7570348381996155,
+ "learning_rate": 0.00019767757086729647,
+ "loss": 1.8373,
+ "step": 206
+ },
+ {
+ "epoch": 0.20705823512862992,
+ "grad_norm": 0.9291234016418457,
+ "learning_rate": 0.00019765505703518496,
+ "loss": 1.7774,
+ "step": 207
+ },
+ {
+ "epoch": 0.20805851645775375,
+ "grad_norm": 0.8211004137992859,
+ "learning_rate": 0.00019763243589782662,
+ "loss": 1.8766,
+ "step": 208
+ },
+ {
+ "epoch": 0.20905879778687755,
+ "grad_norm": 0.6625431180000305,
+ "learning_rate": 0.00019760970748007803,
+ "loss": 1.628,
+ "step": 209
+ },
+ {
+ "epoch": 0.21005907911600138,
+ "grad_norm": 0.7974782586097717,
+ "learning_rate": 0.0001975868718069136,
+ "loss": 1.6896,
+ "step": 210
+ },
+ {
+ "epoch": 0.2110593604451252,
+ "grad_norm": 0.8364912867546082,
+ "learning_rate": 0.00019756392890342563,
+ "loss": 1.7492,
+ "step": 211
+ },
+ {
+ "epoch": 0.21205964177424902,
+ "grad_norm": 0.8730652332305908,
+ "learning_rate": 0.00019754087879482422,
+ "loss": 1.8295,
+ "step": 212
+ },
+ {
+ "epoch": 0.21305992310337282,
+ "grad_norm": 0.7532863020896912,
+ "learning_rate": 0.00019751772150643722,
+ "loss": 1.8309,
+ "step": 213
+ },
+ {
+ "epoch": 0.21406020443249665,
+ "grad_norm": 0.7375178933143616,
+ "learning_rate": 0.00019749445706371038,
+ "loss": 1.7854,
+ "step": 214
+ },
+ {
+ "epoch": 0.21506048576162046,
+ "grad_norm": 0.7524377703666687,
+ "learning_rate": 0.00019747108549220702,
+ "loss": 1.7683,
+ "step": 215
+ },
+ {
+ "epoch": 0.21606076709074426,
+ "grad_norm": 0.7331809997558594,
+ "learning_rate": 0.00019744760681760832,
+ "loss": 1.7103,
+ "step": 216
+ },
+ {
+ "epoch": 0.2170610484198681,
+ "grad_norm": 0.8083691596984863,
+ "learning_rate": 0.00019742402106571314,
+ "loss": 1.674,
+ "step": 217
+ },
+ {
+ "epoch": 0.2180613297489919,
+ "grad_norm": 0.8524570465087891,
+ "learning_rate": 0.00019740032826243788,
+ "loss": 1.7227,
+ "step": 218
+ },
+ {
+ "epoch": 0.21906161107811573,
+ "grad_norm": 0.7676658630371094,
+ "learning_rate": 0.0001973765284338167,
+ "loss": 1.8561,
+ "step": 219
+ },
+ {
+ "epoch": 0.22006189240723953,
+ "grad_norm": 0.7858710289001465,
+ "learning_rate": 0.00019735262160600127,
+ "loss": 1.7796,
+ "step": 220
+ },
+ {
+ "epoch": 0.22106217373636336,
+ "grad_norm": 0.7587497234344482,
+ "learning_rate": 0.00019732860780526088,
+ "loss": 1.9271,
+ "step": 221
+ },
+ {
+ "epoch": 0.22206245506548716,
+ "grad_norm": 0.8084688186645508,
+ "learning_rate": 0.00019730448705798239,
+ "loss": 1.8176,
+ "step": 222
+ },
+ {
+ "epoch": 0.223062736394611,
+ "grad_norm": 0.6736906170845032,
+ "learning_rate": 0.00019728025939067008,
+ "loss": 1.6288,
+ "step": 223
+ },
+ {
+ "epoch": 0.2240630177237348,
+ "grad_norm": 0.7483925819396973,
+ "learning_rate": 0.00019725592482994583,
+ "loss": 1.8363,
+ "step": 224
+ },
+ {
+ "epoch": 0.22506329905285863,
+ "grad_norm": 1.7995796203613281,
+ "learning_rate": 0.00019723148340254892,
+ "loss": 1.9072,
+ "step": 225
+ },
+ {
+ "epoch": 0.22606358038198243,
+ "grad_norm": 0.8028881549835205,
+ "learning_rate": 0.00019720693513533598,
+ "loss": 1.9021,
+ "step": 226
+ },
+ {
+ "epoch": 0.22706386171110624,
+ "grad_norm": 0.9853909015655518,
+ "learning_rate": 0.00019718228005528122,
+ "loss": 2.0159,
+ "step": 227
+ },
+ {
+ "epoch": 0.22806414304023007,
+ "grad_norm": 0.7784947156906128,
+ "learning_rate": 0.00019715751818947603,
+ "loss": 1.7816,
+ "step": 228
+ },
+ {
+ "epoch": 0.22906442436935387,
+ "grad_norm": 0.7447614669799805,
+ "learning_rate": 0.0001971326495651293,
+ "loss": 1.654,
+ "step": 229
+ },
+ {
+ "epoch": 0.2300647056984777,
+ "grad_norm": 0.8673064112663269,
+ "learning_rate": 0.00019710767420956705,
+ "loss": 2.0049,
+ "step": 230
+ },
+ {
+ "epoch": 0.2310649870276015,
+ "grad_norm": 0.8207747936248779,
+ "learning_rate": 0.0001970825921502328,
+ "loss": 1.9388,
+ "step": 231
+ },
+ {
+ "epoch": 0.23206526835672533,
+ "grad_norm": 0.742266058921814,
+ "learning_rate": 0.0001970574034146871,
+ "loss": 1.7658,
+ "step": 232
+ },
+ {
+ "epoch": 0.23306554968584914,
+ "grad_norm": 0.9097973704338074,
+ "learning_rate": 0.00019703210803060782,
+ "loss": 1.8023,
+ "step": 233
+ },
+ {
+ "epoch": 0.23406583101497297,
+ "grad_norm": 0.7512438297271729,
+ "learning_rate": 0.00019700670602579008,
+ "loss": 1.8551,
+ "step": 234
+ },
+ {
+ "epoch": 0.23506611234409677,
+ "grad_norm": 0.8303943872451782,
+ "learning_rate": 0.00019698119742814606,
+ "loss": 1.7723,
+ "step": 235
+ },
+ {
+ "epoch": 0.2360663936732206,
+ "grad_norm": 0.9195139408111572,
+ "learning_rate": 0.00019695558226570507,
+ "loss": 1.6426,
+ "step": 236
+ },
+ {
+ "epoch": 0.2370666750023444,
+ "grad_norm": 0.7734714150428772,
+ "learning_rate": 0.00019692986056661356,
+ "loss": 1.7798,
+ "step": 237
+ },
+ {
+ "epoch": 0.23806695633146824,
+ "grad_norm": 0.8759648203849792,
+ "learning_rate": 0.00019690403235913504,
+ "loss": 1.6465,
+ "step": 238
+ },
+ {
+ "epoch": 0.23906723766059204,
+ "grad_norm": 0.7688003778457642,
+ "learning_rate": 0.00019687809767165,
+ "loss": 2.0092,
+ "step": 239
+ },
+ {
+ "epoch": 0.24006751898971584,
+ "grad_norm": 0.7398790121078491,
+ "learning_rate": 0.000196852056532656,
+ "loss": 1.8176,
+ "step": 240
+ },
+ {
+ "epoch": 0.24106780031883968,
+ "grad_norm": 0.8921257853507996,
+ "learning_rate": 0.00019682590897076752,
+ "loss": 1.7387,
+ "step": 241
+ },
+ {
+ "epoch": 0.24206808164796348,
+ "grad_norm": 0.7939002513885498,
+ "learning_rate": 0.00019679965501471608,
+ "loss": 1.9417,
+ "step": 242
+ },
+ {
+ "epoch": 0.2430683629770873,
+ "grad_norm": 0.7798025608062744,
+ "learning_rate": 0.0001967732946933499,
+ "loss": 1.7134,
+ "step": 243
+ },
+ {
+ "epoch": 0.2440686443062111,
+ "grad_norm": 0.8007254600524902,
+ "learning_rate": 0.00019674682803563428,
+ "loss": 1.7387,
+ "step": 244
+ },
+ {
+ "epoch": 0.24506892563533494,
+ "grad_norm": 0.6257696151733398,
+ "learning_rate": 0.00019672025507065131,
+ "loss": 1.767,
+ "step": 245
+ },
+ {
+ "epoch": 0.24606920696445875,
+ "grad_norm": 0.7942785620689392,
+ "learning_rate": 0.00019669357582759983,
+ "loss": 1.8801,
+ "step": 246
+ },
+ {
+ "epoch": 0.24706948829358258,
+ "grad_norm": 0.7933829426765442,
+ "learning_rate": 0.00019666679033579552,
+ "loss": 1.9711,
+ "step": 247
+ },
+ {
+ "epoch": 0.24806976962270638,
+ "grad_norm": 0.7489326596260071,
+ "learning_rate": 0.00019663989862467082,
+ "loss": 1.8038,
+ "step": 248
+ },
+ {
+ "epoch": 0.2490700509518302,
+ "grad_norm": 0.7279101014137268,
+ "learning_rate": 0.00019661290072377482,
+ "loss": 1.66,
+ "step": 249
+ },
+ {
+ "epoch": 0.25007033228095404,
+ "grad_norm": 0.6823874115943909,
+ "learning_rate": 0.00019658579666277334,
+ "loss": 1.8064,
+ "step": 250
+ },
+ {
+ "epoch": 0.2510706136100778,
+ "grad_norm": 0.6561273336410522,
+ "learning_rate": 0.0001965585864714488,
+ "loss": 1.6874,
+ "step": 251
+ },
+ {
+ "epoch": 0.25207089493920165,
+ "grad_norm": 0.6457573175430298,
+ "learning_rate": 0.00019653127017970034,
+ "loss": 1.4587,
+ "step": 252
+ },
+ {
+ "epoch": 0.2530711762683255,
+ "grad_norm": 0.7649476528167725,
+ "learning_rate": 0.0001965038478175436,
+ "loss": 1.9811,
+ "step": 253
+ },
+ {
+ "epoch": 0.25407145759744926,
+ "grad_norm": 0.8786829710006714,
+ "learning_rate": 0.00019647631941511082,
+ "loss": 1.8629,
+ "step": 254
+ },
+ {
+ "epoch": 0.2550717389265731,
+ "grad_norm": 0.7038159966468811,
+ "learning_rate": 0.0001964486850026507,
+ "loss": 1.6885,
+ "step": 255
+ },
+ {
+ "epoch": 0.2560720202556969,
+ "grad_norm": 0.7255909442901611,
+ "learning_rate": 0.00019642094461052852,
+ "loss": 1.7335,
+ "step": 256
+ },
+ {
+ "epoch": 0.25707230158482075,
+ "grad_norm": 0.7780727744102478,
+ "learning_rate": 0.00019639309826922585,
+ "loss": 1.899,
+ "step": 257
+ },
+ {
+ "epoch": 0.2580725829139445,
+ "grad_norm": 0.8533650040626526,
+ "learning_rate": 0.0001963651460093409,
+ "loss": 1.7711,
+ "step": 258
+ },
+ {
+ "epoch": 0.25907286424306836,
+ "grad_norm": 0.6440068483352661,
+ "learning_rate": 0.00019633708786158806,
+ "loss": 1.6685,
+ "step": 259
+ },
+ {
+ "epoch": 0.2600731455721922,
+ "grad_norm": 0.6873877048492432,
+ "learning_rate": 0.00019630892385679818,
+ "loss": 1.7502,
+ "step": 260
+ },
+ {
+ "epoch": 0.261073426901316,
+ "grad_norm": 0.7100672721862793,
+ "learning_rate": 0.00019628065402591845,
+ "loss": 1.7789,
+ "step": 261
+ },
+ {
+ "epoch": 0.2620737082304398,
+ "grad_norm": 0.8447420001029968,
+ "learning_rate": 0.00019625227840001225,
+ "loss": 1.8577,
+ "step": 262
+ },
+ {
+ "epoch": 0.2630739895595636,
+ "grad_norm": 0.767888605594635,
+ "learning_rate": 0.0001962237970102593,
+ "loss": 1.5936,
+ "step": 263
+ },
+ {
+ "epoch": 0.26407427088868746,
+ "grad_norm": 0.6955805420875549,
+ "learning_rate": 0.0001961952098879555,
+ "loss": 1.7733,
+ "step": 264
+ },
+ {
+ "epoch": 0.26507455221781123,
+ "grad_norm": 0.777740478515625,
+ "learning_rate": 0.00019616651706451287,
+ "loss": 1.6027,
+ "step": 265
+ },
+ {
+ "epoch": 0.26607483354693506,
+ "grad_norm": 0.7691099047660828,
+ "learning_rate": 0.0001961377185714597,
+ "loss": 1.7457,
+ "step": 266
+ },
+ {
+ "epoch": 0.2670751148760589,
+ "grad_norm": 0.6778420805931091,
+ "learning_rate": 0.0001961088144404403,
+ "loss": 1.7704,
+ "step": 267
+ },
+ {
+ "epoch": 0.2680753962051827,
+ "grad_norm": 0.7943267226219177,
+ "learning_rate": 0.00019607980470321505,
+ "loss": 1.9775,
+ "step": 268
+ },
+ {
+ "epoch": 0.2690756775343065,
+ "grad_norm": 0.6660135388374329,
+ "learning_rate": 0.00019605068939166045,
+ "loss": 1.6556,
+ "step": 269
+ },
+ {
+ "epoch": 0.27007595886343033,
+ "grad_norm": 0.8664935827255249,
+ "learning_rate": 0.00019602146853776894,
+ "loss": 2.03,
+ "step": 270
+ },
+ {
+ "epoch": 0.27107624019255416,
+ "grad_norm": 0.7783074975013733,
+ "learning_rate": 0.000195992142173649,
+ "loss": 1.7426,
+ "step": 271
+ },
+ {
+ "epoch": 0.272076521521678,
+ "grad_norm": 0.7470223903656006,
+ "learning_rate": 0.0001959627103315249,
+ "loss": 1.7284,
+ "step": 272
+ },
+ {
+ "epoch": 0.27307680285080177,
+ "grad_norm": 0.7284931540489197,
+ "learning_rate": 0.00019593317304373705,
+ "loss": 1.6977,
+ "step": 273
+ },
+ {
+ "epoch": 0.2740770841799256,
+ "grad_norm": 0.7201762795448303,
+ "learning_rate": 0.00019590353034274144,
+ "loss": 1.7184,
+ "step": 274
+ },
+ {
+ "epoch": 0.27507736550904943,
+ "grad_norm": 0.6756151914596558,
+ "learning_rate": 0.00019587378226111014,
+ "loss": 1.7276,
+ "step": 275
+ },
+ {
+ "epoch": 0.27607764683817326,
+ "grad_norm": 0.6784201860427856,
+ "learning_rate": 0.00019584392883153088,
+ "loss": 1.642,
+ "step": 276
+ },
+ {
+ "epoch": 0.27707792816729704,
+ "grad_norm": 0.7387176752090454,
+ "learning_rate": 0.00019581397008680717,
+ "loss": 1.7911,
+ "step": 277
+ },
+ {
+ "epoch": 0.27807820949642087,
+ "grad_norm": 0.9367021918296814,
+ "learning_rate": 0.00019578390605985826,
+ "loss": 2.0034,
+ "step": 278
+ },
+ {
+ "epoch": 0.2790784908255447,
+ "grad_norm": 0.803698718547821,
+ "learning_rate": 0.00019575373678371909,
+ "loss": 1.7907,
+ "step": 279
+ },
+ {
+ "epoch": 0.2800787721546685,
+ "grad_norm": 0.7324479818344116,
+ "learning_rate": 0.00019572346229154025,
+ "loss": 1.5539,
+ "step": 280
+ },
+ {
+ "epoch": 0.2810790534837923,
+ "grad_norm": 0.7107382416725159,
+ "learning_rate": 0.00019569308261658787,
+ "loss": 1.838,
+ "step": 281
+ },
+ {
+ "epoch": 0.28207933481291614,
+ "grad_norm": 0.8698626756668091,
+ "learning_rate": 0.00019566259779224378,
+ "loss": 1.7433,
+ "step": 282
+ },
+ {
+ "epoch": 0.28307961614203997,
+ "grad_norm": 0.7804028391838074,
+ "learning_rate": 0.00019563200785200526,
+ "loss": 1.7161,
+ "step": 283
+ },
+ {
+ "epoch": 0.28407989747116374,
+ "grad_norm": 0.8762909173965454,
+ "learning_rate": 0.00019560131282948516,
+ "loss": 1.8031,
+ "step": 284
+ },
+ {
+ "epoch": 0.2850801788002876,
+ "grad_norm": 0.8252436518669128,
+ "learning_rate": 0.0001955705127584117,
+ "loss": 1.6434,
+ "step": 285
+ },
+ {
+ "epoch": 0.2860804601294114,
+ "grad_norm": 0.8220797181129456,
+ "learning_rate": 0.00019553960767262863,
+ "loss": 1.8522,
+ "step": 286
+ },
+ {
+ "epoch": 0.28708074145853524,
+ "grad_norm": 0.7883003950119019,
+ "learning_rate": 0.00019550859760609503,
+ "loss": 1.8245,
+ "step": 287
+ },
+ {
+ "epoch": 0.288081022787659,
+ "grad_norm": 0.9208703637123108,
+ "learning_rate": 0.00019547748259288536,
+ "loss": 1.8877,
+ "step": 288
+ },
+ {
+ "epoch": 0.28908130411678284,
+ "grad_norm": 0.8452202677726746,
+ "learning_rate": 0.0001954462626671894,
+ "loss": 1.554,
+ "step": 289
+ },
+ {
+ "epoch": 0.2900815854459067,
+ "grad_norm": 0.82865971326828,
+ "learning_rate": 0.0001954149378633122,
+ "loss": 1.655,
+ "step": 290
+ },
+ {
+ "epoch": 0.29108186677503045,
+ "grad_norm": 0.7871205806732178,
+ "learning_rate": 0.00019538350821567404,
+ "loss": 1.621,
+ "step": 291
+ },
+ {
+ "epoch": 0.2920821481041543,
+ "grad_norm": 0.8288848996162415,
+ "learning_rate": 0.00019535197375881045,
+ "loss": 1.9277,
+ "step": 292
+ },
+ {
+ "epoch": 0.2930824294332781,
+ "grad_norm": 0.7275516986846924,
+ "learning_rate": 0.00019532033452737205,
+ "loss": 1.7949,
+ "step": 293
+ },
+ {
+ "epoch": 0.29408271076240194,
+ "grad_norm": 0.7424570322036743,
+ "learning_rate": 0.00019528859055612468,
+ "loss": 1.6407,
+ "step": 294
+ },
+ {
+ "epoch": 0.2950829920915257,
+ "grad_norm": 0.7031363248825073,
+ "learning_rate": 0.0001952567418799492,
+ "loss": 1.8793,
+ "step": 295
+ },
+ {
+ "epoch": 0.29608327342064955,
+ "grad_norm": 0.7190185189247131,
+ "learning_rate": 0.00019522478853384155,
+ "loss": 1.6759,
+ "step": 296
+ },
+ {
+ "epoch": 0.2970835547497734,
+ "grad_norm": 0.7270736694335938,
+ "learning_rate": 0.00019519273055291266,
+ "loss": 1.6351,
+ "step": 297
+ },
+ {
+ "epoch": 0.2980838360788972,
+ "grad_norm": 0.8894152641296387,
+ "learning_rate": 0.00019516056797238846,
+ "loss": 1.7908,
+ "step": 298
+ },
+ {
+ "epoch": 0.299084117408021,
+ "grad_norm": 0.9089106321334839,
+ "learning_rate": 0.00019512830082760987,
+ "loss": 1.6018,
+ "step": 299
+ },
+ {
+ "epoch": 0.3000843987371448,
+ "grad_norm": 0.8772429823875427,
+ "learning_rate": 0.00019509592915403255,
+ "loss": 1.8474,
+ "step": 300
+ },
+ {
+ "epoch": 0.30108468006626865,
+ "grad_norm": 0.8244933485984802,
+ "learning_rate": 0.00019506345298722717,
+ "loss": 1.4324,
+ "step": 301
+ },
+ {
+ "epoch": 0.3020849613953925,
+ "grad_norm": 0.7283012866973877,
+ "learning_rate": 0.00019503087236287913,
+ "loss": 1.5115,
+ "step": 302
+ },
+ {
+ "epoch": 0.30308524272451626,
+ "grad_norm": 0.7721333503723145,
+ "learning_rate": 0.00019499818731678873,
+ "loss": 1.6728,
+ "step": 303
+ },
+ {
+ "epoch": 0.3040855240536401,
+ "grad_norm": 0.7579306960105896,
+ "learning_rate": 0.00019496539788487082,
+ "loss": 1.5927,
+ "step": 304
+ },
+ {
+ "epoch": 0.3050858053827639,
+ "grad_norm": 0.9054704308509827,
+ "learning_rate": 0.0001949325041031551,
+ "loss": 1.9027,
+ "step": 305
+ },
+ {
+ "epoch": 0.3060860867118877,
+ "grad_norm": 0.7023262977600098,
+ "learning_rate": 0.0001948995060077859,
+ "loss": 1.7705,
+ "step": 306
+ },
+ {
+ "epoch": 0.3070863680410115,
+ "grad_norm": 0.7942065000534058,
+ "learning_rate": 0.0001948664036350221,
+ "loss": 1.8269,
+ "step": 307
+ },
+ {
+ "epoch": 0.30808664937013536,
+ "grad_norm": 0.9305068850517273,
+ "learning_rate": 0.00019483319702123732,
+ "loss": 1.8247,
+ "step": 308
+ },
+ {
+ "epoch": 0.3090869306992592,
+ "grad_norm": 0.814664900302887,
+ "learning_rate": 0.00019479988620291956,
+ "loss": 1.9179,
+ "step": 309
+ },
+ {
+ "epoch": 0.31008721202838296,
+ "grad_norm": 0.6418014764785767,
+ "learning_rate": 0.00019476647121667137,
+ "loss": 1.5011,
+ "step": 310
+ },
+ {
+ "epoch": 0.3110874933575068,
+ "grad_norm": 0.7911447882652283,
+ "learning_rate": 0.00019473295209920983,
+ "loss": 1.857,
+ "step": 311
+ },
+ {
+ "epoch": 0.3120877746866306,
+ "grad_norm": 0.7792949676513672,
+ "learning_rate": 0.00019469932888736632,
+ "loss": 1.7279,
+ "step": 312
+ },
+ {
+ "epoch": 0.31308805601575446,
+ "grad_norm": 0.7579171657562256,
+ "learning_rate": 0.00019466560161808674,
+ "loss": 1.6902,
+ "step": 313
+ },
+ {
+ "epoch": 0.31408833734487823,
+ "grad_norm": 0.7052372694015503,
+ "learning_rate": 0.00019463177032843124,
+ "loss": 1.7302,
+ "step": 314
+ },
+ {
+ "epoch": 0.31508861867400206,
+ "grad_norm": 0.7188624143600464,
+ "learning_rate": 0.00019459783505557424,
+ "loss": 1.7338,
+ "step": 315
+ },
+ {
+ "epoch": 0.3160889000031259,
+ "grad_norm": 0.6057978272438049,
+ "learning_rate": 0.00019456379583680452,
+ "loss": 1.6123,
+ "step": 316
+ },
+ {
+ "epoch": 0.31708918133224967,
+ "grad_norm": 0.8339365720748901,
+ "learning_rate": 0.000194529652709525,
+ "loss": 1.9765,
+ "step": 317
+ },
+ {
+ "epoch": 0.3180894626613735,
+ "grad_norm": 0.8524260520935059,
+ "learning_rate": 0.00019449540571125286,
+ "loss": 1.6803,
+ "step": 318
+ },
+ {
+ "epoch": 0.31908974399049733,
+ "grad_norm": 0.7035975456237793,
+ "learning_rate": 0.00019446105487961926,
+ "loss": 1.5792,
+ "step": 319
+ },
+ {
+ "epoch": 0.32009002531962116,
+ "grad_norm": 0.7894249558448792,
+ "learning_rate": 0.0001944266002523696,
+ "loss": 1.6326,
+ "step": 320
+ },
+ {
+ "epoch": 0.32109030664874494,
+ "grad_norm": 0.7716989517211914,
+ "learning_rate": 0.0001943920418673633,
+ "loss": 1.6871,
+ "step": 321
+ },
+ {
+ "epoch": 0.32209058797786877,
+ "grad_norm": 0.7914933562278748,
+ "learning_rate": 0.00019435737976257377,
+ "loss": 1.7148,
+ "step": 322
+ },
+ {
+ "epoch": 0.3230908693069926,
+ "grad_norm": 0.7113205790519714,
+ "learning_rate": 0.00019432261397608834,
+ "loss": 1.5236,
+ "step": 323
+ },
+ {
+ "epoch": 0.32409115063611643,
+ "grad_norm": 0.8609917163848877,
+ "learning_rate": 0.00019428774454610843,
+ "loss": 1.8101,
+ "step": 324
+ },
+ {
+ "epoch": 0.3250914319652402,
+ "grad_norm": 0.7319685220718384,
+ "learning_rate": 0.00019425277151094913,
+ "loss": 1.7712,
+ "step": 325
+ },
+ {
+ "epoch": 0.32609171329436404,
+ "grad_norm": 0.6478747725486755,
+ "learning_rate": 0.00019421769490903957,
+ "loss": 1.8535,
+ "step": 326
+ },
+ {
+ "epoch": 0.32709199462348787,
+ "grad_norm": 0.7025763392448425,
+ "learning_rate": 0.0001941825147789225,
+ "loss": 1.9213,
+ "step": 327
+ },
+ {
+ "epoch": 0.32809227595261165,
+ "grad_norm": 0.7595239877700806,
+ "learning_rate": 0.00019414723115925456,
+ "loss": 1.7449,
+ "step": 328
+ },
+ {
+ "epoch": 0.3290925572817355,
+ "grad_norm": 0.7728105783462524,
+ "learning_rate": 0.0001941118440888061,
+ "loss": 1.8821,
+ "step": 329
+ },
+ {
+ "epoch": 0.3300928386108593,
+ "grad_norm": 0.7430977821350098,
+ "learning_rate": 0.0001940763536064611,
+ "loss": 1.6904,
+ "step": 330
+ },
+ {
+ "epoch": 0.33109311993998314,
+ "grad_norm": 0.7909367680549622,
+ "learning_rate": 0.00019404075975121716,
+ "loss": 1.7899,
+ "step": 331
+ },
+ {
+ "epoch": 0.3320934012691069,
+ "grad_norm": 0.7561226487159729,
+ "learning_rate": 0.0001940050625621855,
+ "loss": 1.7746,
+ "step": 332
+ },
+ {
+ "epoch": 0.33309368259823074,
+ "grad_norm": 0.7602452635765076,
+ "learning_rate": 0.00019396926207859084,
+ "loss": 1.7909,
+ "step": 333
+ },
+ {
+ "epoch": 0.3340939639273546,
+ "grad_norm": 0.8194379806518555,
+ "learning_rate": 0.0001939333583397715,
+ "loss": 1.7039,
+ "step": 334
+ },
+ {
+ "epoch": 0.3350942452564784,
+ "grad_norm": 0.7036342024803162,
+ "learning_rate": 0.00019389735138517915,
+ "loss": 1.6663,
+ "step": 335
+ },
+ {
+ "epoch": 0.3360945265856022,
+ "grad_norm": 0.8429521918296814,
+ "learning_rate": 0.00019386124125437895,
+ "loss": 1.589,
+ "step": 336
+ },
+ {
+ "epoch": 0.337094807914726,
+ "grad_norm": 0.7271071076393127,
+ "learning_rate": 0.00019382502798704935,
+ "loss": 1.646,
+ "step": 337
+ },
+ {
+ "epoch": 0.33809508924384984,
+ "grad_norm": 0.7862086892127991,
+ "learning_rate": 0.00019378871162298227,
+ "loss": 1.6085,
+ "step": 338
+ },
+ {
+ "epoch": 0.3390953705729737,
+ "grad_norm": 0.676815390586853,
+ "learning_rate": 0.00019375229220208276,
+ "loss": 1.7335,
+ "step": 339
+ },
+ {
+ "epoch": 0.34009565190209745,
+ "grad_norm": 0.8916042447090149,
+ "learning_rate": 0.00019371576976436917,
+ "loss": 1.7914,
+ "step": 340
+ },
+ {
+ "epoch": 0.3410959332312213,
+ "grad_norm": 0.7913751006126404,
+ "learning_rate": 0.00019367914434997312,
+ "loss": 1.6031,
+ "step": 341
+ },
+ {
+ "epoch": 0.3420962145603451,
+ "grad_norm": 0.7409866452217102,
+ "learning_rate": 0.00019364241599913924,
+ "loss": 1.6525,
+ "step": 342
+ },
+ {
+ "epoch": 0.3430964958894689,
+ "grad_norm": 0.7472705841064453,
+ "learning_rate": 0.0001936055847522254,
+ "loss": 1.6716,
+ "step": 343
+ },
+ {
+ "epoch": 0.3440967772185927,
+ "grad_norm": 0.7030773758888245,
+ "learning_rate": 0.00019356865064970244,
+ "loss": 1.7134,
+ "step": 344
+ },
+ {
+ "epoch": 0.34509705854771655,
+ "grad_norm": 0.6609564423561096,
+ "learning_rate": 0.0001935316137321543,
+ "loss": 1.7127,
+ "step": 345
+ },
+ {
+ "epoch": 0.3460973398768404,
+ "grad_norm": 0.7811393141746521,
+ "learning_rate": 0.00019349447404027782,
+ "loss": 1.75,
+ "step": 346
+ },
+ {
+ "epoch": 0.34709762120596416,
+ "grad_norm": 0.6980521082878113,
+ "learning_rate": 0.00019345723161488283,
+ "loss": 1.82,
+ "step": 347
+ },
+ {
+ "epoch": 0.348097902535088,
+ "grad_norm": 0.749796986579895,
+ "learning_rate": 0.000193419886496892,
+ "loss": 1.9755,
+ "step": 348
+ },
+ {
+ "epoch": 0.3490981838642118,
+ "grad_norm": 0.9486667513847351,
+ "learning_rate": 0.00019338243872734086,
+ "loss": 1.7047,
+ "step": 349
+ },
+ {
+ "epoch": 0.35009846519333565,
+ "grad_norm": 0.8086081147193909,
+ "learning_rate": 0.00019334488834737775,
+ "loss": 1.661,
+ "step": 350
+ },
+ {
+ "epoch": 0.3510987465224594,
+ "grad_norm": 0.700549840927124,
+ "learning_rate": 0.00019330723539826375,
+ "loss": 1.8696,
+ "step": 351
+ },
+ {
+ "epoch": 0.35209902785158326,
+ "grad_norm": 0.7465476393699646,
+ "learning_rate": 0.00019326947992137262,
+ "loss": 1.5444,
+ "step": 352
+ },
+ {
+ "epoch": 0.3530993091807071,
+ "grad_norm": 0.7370999455451965,
+ "learning_rate": 0.00019323162195819082,
+ "loss": 1.8805,
+ "step": 353
+ },
+ {
+ "epoch": 0.35409959050983086,
+ "grad_norm": 0.719359278678894,
+ "learning_rate": 0.0001931936615503174,
+ "loss": 1.8022,
+ "step": 354
+ },
+ {
+ "epoch": 0.3550998718389547,
+ "grad_norm": 0.7301434278488159,
+ "learning_rate": 0.000193155598739464,
+ "loss": 1.6984,
+ "step": 355
+ },
+ {
+ "epoch": 0.3561001531680785,
+ "grad_norm": 0.7191399335861206,
+ "learning_rate": 0.0001931174335674547,
+ "loss": 1.7229,
+ "step": 356
+ },
+ {
+ "epoch": 0.35710043449720236,
+ "grad_norm": 0.7471932768821716,
+ "learning_rate": 0.0001930791660762262,
+ "loss": 1.7408,
+ "step": 357
+ },
+ {
+ "epoch": 0.35810071582632613,
+ "grad_norm": 0.8197934031486511,
+ "learning_rate": 0.00019304079630782752,
+ "loss": 1.6938,
+ "step": 358
+ },
+ {
+ "epoch": 0.35910099715544996,
+ "grad_norm": 0.7408166527748108,
+ "learning_rate": 0.0001930023243044201,
+ "loss": 1.7798,
+ "step": 359
+ },
+ {
+ "epoch": 0.3601012784845738,
+ "grad_norm": 0.7525373101234436,
+ "learning_rate": 0.00019296375010827773,
+ "loss": 1.711,
+ "step": 360
+ },
+ {
+ "epoch": 0.3611015598136976,
+ "grad_norm": 0.6712046265602112,
+ "learning_rate": 0.00019292507376178643,
+ "loss": 1.8157,
+ "step": 361
+ },
+ {
+ "epoch": 0.3621018411428214,
+ "grad_norm": 0.6712916493415833,
+ "learning_rate": 0.00019288629530744454,
+ "loss": 1.8707,
+ "step": 362
+ },
+ {
+ "epoch": 0.36310212247194523,
+ "grad_norm": 0.6127772331237793,
+ "learning_rate": 0.0001928474147878626,
+ "loss": 1.4743,
+ "step": 363
+ },
+ {
+ "epoch": 0.36410240380106906,
+ "grad_norm": 0.910310685634613,
+ "learning_rate": 0.0001928084322457632,
+ "loss": 1.7956,
+ "step": 364
+ },
+ {
+ "epoch": 0.3651026851301929,
+ "grad_norm": 0.6267688870429993,
+ "learning_rate": 0.00019276934772398114,
+ "loss": 1.4664,
+ "step": 365
+ },
+ {
+ "epoch": 0.36610296645931667,
+ "grad_norm": 0.8317943811416626,
+ "learning_rate": 0.00019273016126546323,
+ "loss": 1.853,
+ "step": 366
+ },
+ {
+ "epoch": 0.3671032477884405,
+ "grad_norm": 0.7581344842910767,
+ "learning_rate": 0.00019269087291326833,
+ "loss": 1.9236,
+ "step": 367
+ },
+ {
+ "epoch": 0.36810352911756433,
+ "grad_norm": 0.9311390519142151,
+ "learning_rate": 0.00019265148271056722,
+ "loss": 1.7019,
+ "step": 368
+ },
+ {
+ "epoch": 0.3691038104466881,
+ "grad_norm": 0.9513958096504211,
+ "learning_rate": 0.0001926119907006426,
+ "loss": 1.7617,
+ "step": 369
+ },
+ {
+ "epoch": 0.37010409177581194,
+ "grad_norm": 0.7407613396644592,
+ "learning_rate": 0.00019257239692688907,
+ "loss": 2.1057,
+ "step": 370
+ },
+ {
+ "epoch": 0.37110437310493577,
+ "grad_norm": 0.7530227899551392,
+ "learning_rate": 0.00019253270143281296,
+ "loss": 1.9844,
+ "step": 371
+ },
+ {
+ "epoch": 0.3721046544340596,
+ "grad_norm": 0.6733037233352661,
+ "learning_rate": 0.00019249290426203252,
+ "loss": 1.8939,
+ "step": 372
+ },
+ {
+ "epoch": 0.3731049357631834,
+ "grad_norm": 0.7037007808685303,
+ "learning_rate": 0.0001924530054582776,
+ "loss": 1.6897,
+ "step": 373
+ },
+ {
+ "epoch": 0.3741052170923072,
+ "grad_norm": 0.7689145803451538,
+ "learning_rate": 0.0001924130050653898,
+ "loss": 1.6544,
+ "step": 374
+ },
+ {
+ "epoch": 0.37510549842143104,
+ "grad_norm": 0.7100968360900879,
+ "learning_rate": 0.00019237290312732226,
+ "loss": 1.7932,
+ "step": 375
+ },
+ {
+ "epoch": 0.37610577975055487,
+ "grad_norm": 0.7645193338394165,
+ "learning_rate": 0.00019233269968813984,
+ "loss": 1.6576,
+ "step": 376
+ },
+ {
+ "epoch": 0.37710606107967864,
+ "grad_norm": 0.6831678152084351,
+ "learning_rate": 0.00019229239479201876,
+ "loss": 1.6675,
+ "step": 377
+ },
+ {
+ "epoch": 0.3781063424088025,
+ "grad_norm": 0.8677794337272644,
+ "learning_rate": 0.0001922519884832469,
+ "loss": 1.6064,
+ "step": 378
+ },
+ {
+ "epoch": 0.3791066237379263,
+ "grad_norm": 0.6727691888809204,
+ "learning_rate": 0.0001922114808062234,
+ "loss": 1.6249,
+ "step": 379
+ },
+ {
+ "epoch": 0.3801069050670501,
+ "grad_norm": 0.6588670611381531,
+ "learning_rate": 0.00019217087180545893,
+ "loss": 1.5541,
+ "step": 380
+ },
+ {
+ "epoch": 0.3811071863961739,
+ "grad_norm": 0.7666369080543518,
+ "learning_rate": 0.0001921301615255754,
+ "loss": 1.7806,
+ "step": 381
+ },
+ {
+ "epoch": 0.38210746772529774,
+ "grad_norm": 0.6465156078338623,
+ "learning_rate": 0.0001920893500113061,
+ "loss": 1.512,
+ "step": 382
+ },
+ {
+ "epoch": 0.3831077490544216,
+ "grad_norm": 0.7854346632957458,
+ "learning_rate": 0.00019204843730749547,
+ "loss": 1.6857,
+ "step": 383
+ },
+ {
+ "epoch": 0.38410803038354535,
+ "grad_norm": 0.6625111103057861,
+ "learning_rate": 0.00019200742345909915,
+ "loss": 1.7033,
+ "step": 384
+ },
+ {
+ "epoch": 0.3851083117126692,
+ "grad_norm": 0.7273709177970886,
+ "learning_rate": 0.00019196630851118398,
+ "loss": 1.665,
+ "step": 385
+ },
+ {
+ "epoch": 0.386108593041793,
+ "grad_norm": 0.6861465573310852,
+ "learning_rate": 0.0001919250925089278,
+ "loss": 1.5028,
+ "step": 386
+ },
+ {
+ "epoch": 0.38710887437091684,
+ "grad_norm": 0.845456063747406,
+ "learning_rate": 0.00019188377549761963,
+ "loss": 1.967,
+ "step": 387
+ },
+ {
+ "epoch": 0.3881091557000406,
+ "grad_norm": 0.6481165289878845,
+ "learning_rate": 0.00019184235752265928,
+ "loss": 1.6053,
+ "step": 388
+ },
+ {
+ "epoch": 0.38910943702916445,
+ "grad_norm": 0.6312947273254395,
+ "learning_rate": 0.00019180083862955772,
+ "loss": 1.4427,
+ "step": 389
+ },
+ {
+ "epoch": 0.3901097183582883,
+ "grad_norm": 0.7874154448509216,
+ "learning_rate": 0.00019175921886393666,
+ "loss": 1.6099,
+ "step": 390
+ },
+ {
+ "epoch": 0.39110999968741206,
+ "grad_norm": 0.6839481592178345,
+ "learning_rate": 0.00019171749827152869,
+ "loss": 1.7004,
+ "step": 391
+ },
+ {
+ "epoch": 0.3921102810165359,
+ "grad_norm": 0.7239277362823486,
+ "learning_rate": 0.0001916756768981772,
+ "loss": 1.8813,
+ "step": 392
+ },
+ {
+ "epoch": 0.3931105623456597,
+ "grad_norm": 0.8241100311279297,
+ "learning_rate": 0.00019163375478983632,
+ "loss": 1.9443,
+ "step": 393
+ },
+ {
+ "epoch": 0.39411084367478355,
+ "grad_norm": 0.7401999235153198,
+ "learning_rate": 0.00019159173199257085,
+ "loss": 1.6663,
+ "step": 394
+ },
+ {
+ "epoch": 0.3951111250039073,
+ "grad_norm": 0.8297036290168762,
+ "learning_rate": 0.00019154960855255628,
+ "loss": 1.8012,
+ "step": 395
+ },
+ {
+ "epoch": 0.39611140633303116,
+ "grad_norm": 0.9661216735839844,
+ "learning_rate": 0.0001915073845160786,
+ "loss": 1.7007,
+ "step": 396
+ },
+ {
+ "epoch": 0.397111687662155,
+ "grad_norm": 1.4041926860809326,
+ "learning_rate": 0.00019146505992953446,
+ "loss": 1.7232,
+ "step": 397
+ },
+ {
+ "epoch": 0.3981119689912788,
+ "grad_norm": 0.8469036221504211,
+ "learning_rate": 0.00019142263483943085,
+ "loss": 1.4479,
+ "step": 398
+ },
+ {
+ "epoch": 0.3991122503204026,
+ "grad_norm": 0.9476561546325684,
+ "learning_rate": 0.00019138010929238534,
+ "loss": 1.8572,
+ "step": 399
+ },
+ {
+ "epoch": 0.4001125316495264,
+ "grad_norm": 0.7196705937385559,
+ "learning_rate": 0.00019133748333512575,
+ "loss": 1.6184,
+ "step": 400
+ },
+ {
+ "epoch": 0.40111281297865026,
+ "grad_norm": 0.8957480192184448,
+ "learning_rate": 0.00019129475701449035,
+ "loss": 1.762,
+ "step": 401
+ },
+ {
+ "epoch": 0.4021130943077741,
+ "grad_norm": 0.7850635647773743,
+ "learning_rate": 0.0001912519303774276,
+ "loss": 1.6764,
+ "step": 402
+ },
+ {
+ "epoch": 0.40311337563689786,
+ "grad_norm": 0.7579814791679382,
+ "learning_rate": 0.0001912090034709963,
+ "loss": 1.6231,
+ "step": 403
+ },
+ {
+ "epoch": 0.4041136569660217,
+ "grad_norm": 0.7173107266426086,
+ "learning_rate": 0.00019116597634236525,
+ "loss": 1.7107,
+ "step": 404
+ },
+ {
+ "epoch": 0.4051139382951455,
+ "grad_norm": 0.7832950353622437,
+ "learning_rate": 0.0001911228490388136,
+ "loss": 1.8608,
+ "step": 405
+ },
+ {
+ "epoch": 0.4061142196242693,
+ "grad_norm": 0.716299295425415,
+ "learning_rate": 0.00019107962160773035,
+ "loss": 1.652,
+ "step": 406
+ },
+ {
+ "epoch": 0.40711450095339313,
+ "grad_norm": 0.6675253510475159,
+ "learning_rate": 0.0001910362940966147,
+ "loss": 1.5963,
+ "step": 407
+ },
+ {
+ "epoch": 0.40811478228251696,
+ "grad_norm": 0.6555336713790894,
+ "learning_rate": 0.00019099286655307568,
+ "loss": 1.4991,
+ "step": 408
+ },
+ {
+ "epoch": 0.4091150636116408,
+ "grad_norm": 0.7307867407798767,
+ "learning_rate": 0.0001909493390248324,
+ "loss": 1.8221,
+ "step": 409
+ },
+ {
+ "epoch": 0.41011534494076457,
+ "grad_norm": 0.6557430624961853,
+ "learning_rate": 0.00019090571155971366,
+ "loss": 1.6484,
+ "step": 410
+ },
+ {
+ "epoch": 0.4111156262698884,
+ "grad_norm": 0.6816605925559998,
+ "learning_rate": 0.00019086198420565823,
+ "loss": 1.5052,
+ "step": 411
+ },
+ {
+ "epoch": 0.41211590759901223,
+ "grad_norm": 0.66513592004776,
+ "learning_rate": 0.00019081815701071445,
+ "loss": 1.8818,
+ "step": 412
+ },
+ {
+ "epoch": 0.41311618892813606,
+ "grad_norm": 0.6807469129562378,
+ "learning_rate": 0.0001907742300230406,
+ "loss": 1.5997,
+ "step": 413
+ },
+ {
+ "epoch": 0.41411647025725984,
+ "grad_norm": 0.8060654401779175,
+ "learning_rate": 0.00019073020329090444,
+ "loss": 1.8099,
+ "step": 414
+ },
+ {
+ "epoch": 0.41511675158638367,
+ "grad_norm": 0.7440110445022583,
+ "learning_rate": 0.0001906860768626834,
+ "loss": 1.4876,
+ "step": 415
+ },
+ {
+ "epoch": 0.4161170329155075,
+ "grad_norm": 0.7675415277481079,
+ "learning_rate": 0.00019064185078686443,
+ "loss": 1.4722,
+ "step": 416
+ },
+ {
+ "epoch": 0.4171173142446313,
+ "grad_norm": 0.6656553149223328,
+ "learning_rate": 0.000190597525112044,
+ "loss": 1.6453,
+ "step": 417
+ },
+ {
+ "epoch": 0.4181175955737551,
+ "grad_norm": 0.730689287185669,
+ "learning_rate": 0.000190553099886928,
+ "loss": 1.6584,
+ "step": 418
+ },
+ {
+ "epoch": 0.41911787690287894,
+ "grad_norm": 0.8425858616828918,
+ "learning_rate": 0.00019050857516033173,
+ "loss": 1.6249,
+ "step": 419
+ },
+ {
+ "epoch": 0.42011815823200277,
+ "grad_norm": 0.7816892266273499,
+ "learning_rate": 0.00019046395098117983,
+ "loss": 1.7532,
+ "step": 420
+ },
+ {
+ "epoch": 0.42111843956112655,
+ "grad_norm": 0.7324026823043823,
+ "learning_rate": 0.00019041922739850616,
+ "loss": 1.8523,
+ "step": 421
+ },
+ {
+ "epoch": 0.4221187208902504,
+ "grad_norm": 0.7473389506340027,
+ "learning_rate": 0.00019037440446145385,
+ "loss": 1.583,
+ "step": 422
+ },
+ {
+ "epoch": 0.4231190022193742,
+ "grad_norm": 0.8720895051956177,
+ "learning_rate": 0.00019032948221927524,
+ "loss": 1.6806,
+ "step": 423
+ },
+ {
+ "epoch": 0.42411928354849804,
+ "grad_norm": 0.728528618812561,
+ "learning_rate": 0.00019028446072133175,
+ "loss": 1.7283,
+ "step": 424
+ },
+ {
+ "epoch": 0.4251195648776218,
+ "grad_norm": 0.739930272102356,
+ "learning_rate": 0.00019023934001709383,
+ "loss": 1.7244,
+ "step": 425
+ },
+ {
+ "epoch": 0.42611984620674564,
+ "grad_norm": 0.7825399041175842,
+ "learning_rate": 0.00019019412015614098,
+ "loss": 1.7871,
+ "step": 426
+ },
+ {
+ "epoch": 0.4271201275358695,
+ "grad_norm": 0.8878734707832336,
+ "learning_rate": 0.00019014880118816164,
+ "loss": 1.6018,
+ "step": 427
+ },
+ {
+ "epoch": 0.4281204088649933,
+ "grad_norm": 0.726259708404541,
+ "learning_rate": 0.0001901033831629532,
+ "loss": 1.7732,
+ "step": 428
+ },
+ {
+ "epoch": 0.4291206901941171,
+ "grad_norm": 0.7620319724082947,
+ "learning_rate": 0.00019005786613042185,
+ "loss": 1.6466,
+ "step": 429
+ },
+ {
+ "epoch": 0.4301209715232409,
+ "grad_norm": 0.7295501828193665,
+ "learning_rate": 0.00019001225014058255,
+ "loss": 1.8708,
+ "step": 430
+ },
+ {
+ "epoch": 0.43112125285236474,
+ "grad_norm": 0.7419458031654358,
+ "learning_rate": 0.00018996653524355902,
+ "loss": 1.6583,
+ "step": 431
+ },
+ {
+ "epoch": 0.4321215341814885,
+ "grad_norm": 0.7701705098152161,
+ "learning_rate": 0.00018992072148958368,
+ "loss": 1.4421,
+ "step": 432
+ },
+ {
+ "epoch": 0.43312181551061235,
+ "grad_norm": 0.8237659931182861,
+ "learning_rate": 0.00018987480892899758,
+ "loss": 1.844,
+ "step": 433
+ },
+ {
+ "epoch": 0.4341220968397362,
+ "grad_norm": 0.6167672276496887,
+ "learning_rate": 0.00018982879761225027,
+ "loss": 1.6193,
+ "step": 434
+ },
+ {
+ "epoch": 0.43512237816886,
+ "grad_norm": 0.7565534710884094,
+ "learning_rate": 0.00018978268758989991,
+ "loss": 1.7655,
+ "step": 435
+ },
+ {
+ "epoch": 0.4361226594979838,
+ "grad_norm": 0.8333333730697632,
+ "learning_rate": 0.00018973647891261307,
+ "loss": 1.5764,
+ "step": 436
+ },
+ {
+ "epoch": 0.4371229408271076,
+ "grad_norm": 0.7404434084892273,
+ "learning_rate": 0.00018969017163116472,
+ "loss": 1.7922,
+ "step": 437
+ },
+ {
+ "epoch": 0.43812322215623145,
+ "grad_norm": 0.7129400372505188,
+ "learning_rate": 0.0001896437657964382,
+ "loss": 1.6925,
+ "step": 438
+ },
+ {
+ "epoch": 0.4391235034853553,
+ "grad_norm": 0.7750307321548462,
+ "learning_rate": 0.00018959726145942508,
+ "loss": 1.8133,
+ "step": 439
+ },
+ {
+ "epoch": 0.44012378481447906,
+ "grad_norm": 0.7244920134544373,
+ "learning_rate": 0.00018955065867122528,
+ "loss": 1.6425,
+ "step": 440
+ },
+ {
+ "epoch": 0.4411240661436029,
+ "grad_norm": 0.7624644637107849,
+ "learning_rate": 0.00018950395748304678,
+ "loss": 1.886,
+ "step": 441
+ },
+ {
+ "epoch": 0.4421243474727267,
+ "grad_norm": 0.7016286849975586,
+ "learning_rate": 0.0001894571579462058,
+ "loss": 1.7308,
+ "step": 442
+ },
+ {
+ "epoch": 0.4431246288018505,
+ "grad_norm": 0.6965353488922119,
+ "learning_rate": 0.00018941026011212654,
+ "loss": 1.5872,
+ "step": 443
+ },
+ {
+ "epoch": 0.4441249101309743,
+ "grad_norm": 0.7479360699653625,
+ "learning_rate": 0.00018936326403234125,
+ "loss": 1.8129,
+ "step": 444
+ },
+ {
+ "epoch": 0.44512519146009816,
+ "grad_norm": 0.7027686834335327,
+ "learning_rate": 0.00018931616975849006,
+ "loss": 1.6433,
+ "step": 445
+ },
+ {
+ "epoch": 0.446125472789222,
+ "grad_norm": 0.7771592140197754,
+ "learning_rate": 0.00018926897734232115,
+ "loss": 1.4645,
+ "step": 446
+ },
+ {
+ "epoch": 0.44712575411834576,
+ "grad_norm": 0.7766458988189697,
+ "learning_rate": 0.0001892216868356904,
+ "loss": 1.7873,
+ "step": 447
+ },
+ {
+ "epoch": 0.4481260354474696,
+ "grad_norm": 0.8146182298660278,
+ "learning_rate": 0.0001891742982905615,
+ "loss": 1.7935,
+ "step": 448
+ },
+ {
+ "epoch": 0.4491263167765934,
+ "grad_norm": 0.6744781136512756,
+ "learning_rate": 0.00018912681175900598,
+ "loss": 1.8916,
+ "step": 449
+ },
+ {
+ "epoch": 0.45012659810571726,
+ "grad_norm": 0.6259024143218994,
+ "learning_rate": 0.00018907922729320285,
+ "loss": 1.6616,
+ "step": 450
+ },
+ {
+ "epoch": 0.45112687943484103,
+ "grad_norm": 0.7717494368553162,
+ "learning_rate": 0.00018903154494543889,
+ "loss": 1.817,
+ "step": 451
+ },
+ {
+ "epoch": 0.45212716076396486,
+ "grad_norm": 0.648040771484375,
+ "learning_rate": 0.00018898376476810834,
+ "loss": 1.6309,
+ "step": 452
+ },
+ {
+ "epoch": 0.4531274420930887,
+ "grad_norm": 0.7560017704963684,
+ "learning_rate": 0.00018893588681371303,
+ "loss": 1.8016,
+ "step": 453
+ },
+ {
+ "epoch": 0.45412772342221247,
+ "grad_norm": 0.8778628706932068,
+ "learning_rate": 0.00018888791113486213,
+ "loss": 1.7797,
+ "step": 454
+ },
+ {
+ "epoch": 0.4551280047513363,
+ "grad_norm": 0.6795655488967896,
+ "learning_rate": 0.00018883983778427227,
+ "loss": 1.6343,
+ "step": 455
+ },
+ {
+ "epoch": 0.45612828608046013,
+ "grad_norm": 0.6690213084220886,
+ "learning_rate": 0.0001887916668147673,
+ "loss": 1.6224,
+ "step": 456
+ },
+ {
+ "epoch": 0.45712856740958396,
+ "grad_norm": 0.7529327869415283,
+ "learning_rate": 0.00018874339827927846,
+ "loss": 1.6396,
+ "step": 457
+ },
+ {
+ "epoch": 0.45812884873870774,
+ "grad_norm": 0.8393098711967468,
+ "learning_rate": 0.00018869503223084414,
+ "loss": 1.8374,
+ "step": 458
+ },
+ {
+ "epoch": 0.45912913006783157,
+ "grad_norm": 0.7435776591300964,
+ "learning_rate": 0.00018864656872260985,
+ "loss": 1.7363,
+ "step": 459
+ },
+ {
+ "epoch": 0.4601294113969554,
+ "grad_norm": 0.6737055778503418,
+ "learning_rate": 0.00018859800780782828,
+ "loss": 1.6661,
+ "step": 460
+ },
+ {
+ "epoch": 0.46112969272607923,
+ "grad_norm": 0.7674340605735779,
+ "learning_rate": 0.000188549349539859,
+ "loss": 1.6269,
+ "step": 461
+ },
+ {
+ "epoch": 0.462129974055203,
+ "grad_norm": 0.7329950928688049,
+ "learning_rate": 0.00018850059397216876,
+ "loss": 1.6989,
+ "step": 462
+ },
+ {
+ "epoch": 0.46313025538432684,
+ "grad_norm": 0.7075778841972351,
+ "learning_rate": 0.00018845174115833099,
+ "loss": 1.7286,
+ "step": 463
+ },
+ {
+ "epoch": 0.46413053671345067,
+ "grad_norm": 0.7973611354827881,
+ "learning_rate": 0.0001884027911520262,
+ "loss": 1.7478,
+ "step": 464
+ },
+ {
+ "epoch": 0.4651308180425745,
+ "grad_norm": 0.7790057063102722,
+ "learning_rate": 0.00018835374400704154,
+ "loss": 1.6659,
+ "step": 465
+ },
+ {
+ "epoch": 0.4661310993716983,
+ "grad_norm": 0.8505310416221619,
+ "learning_rate": 0.00018830459977727096,
+ "loss": 1.6879,
+ "step": 466
+ },
+ {
+ "epoch": 0.4671313807008221,
+ "grad_norm": 0.7616267800331116,
+ "learning_rate": 0.0001882553585167151,
+ "loss": 1.6525,
+ "step": 467
+ },
+ {
+ "epoch": 0.46813166202994594,
+ "grad_norm": 0.8038133978843689,
+ "learning_rate": 0.00018820602027948114,
+ "loss": 1.7929,
+ "step": 468
+ },
+ {
+ "epoch": 0.4691319433590697,
+ "grad_norm": 0.6762365698814392,
+ "learning_rate": 0.00018815658511978298,
+ "loss": 1.6926,
+ "step": 469
+ },
+ {
+ "epoch": 0.47013222468819355,
+ "grad_norm": 0.6515015959739685,
+ "learning_rate": 0.00018810705309194083,
+ "loss": 1.7817,
+ "step": 470
+ },
+ {
+ "epoch": 0.4711325060173174,
+ "grad_norm": 0.696675181388855,
+ "learning_rate": 0.00018805742425038145,
+ "loss": 1.7195,
+ "step": 471
+ },
+ {
+ "epoch": 0.4721327873464412,
+ "grad_norm": 0.7929533123970032,
+ "learning_rate": 0.00018800769864963802,
+ "loss": 2.0165,
+ "step": 472
+ },
+ {
+ "epoch": 0.473133068675565,
+ "grad_norm": 0.7223886251449585,
+ "learning_rate": 0.00018795787634434994,
+ "loss": 1.6708,
+ "step": 473
+ },
+ {
+ "epoch": 0.4741333500046888,
+ "grad_norm": 0.7982028126716614,
+ "learning_rate": 0.0001879079573892629,
+ "loss": 1.628,
+ "step": 474
+ },
+ {
+ "epoch": 0.47513363133381264,
+ "grad_norm": 0.6962152123451233,
+ "learning_rate": 0.00018785794183922883,
+ "loss": 1.6429,
+ "step": 475
+ },
+ {
+ "epoch": 0.4761339126629365,
+ "grad_norm": 0.687489926815033,
+ "learning_rate": 0.00018780782974920572,
+ "loss": 1.4546,
+ "step": 476
+ },
+ {
+ "epoch": 0.47713419399206025,
+ "grad_norm": 0.7260375022888184,
+ "learning_rate": 0.00018775762117425777,
+ "loss": 1.739,
+ "step": 477
+ },
+ {
+ "epoch": 0.4781344753211841,
+ "grad_norm": 0.759400486946106,
+ "learning_rate": 0.0001877073161695551,
+ "loss": 1.6465,
+ "step": 478
+ },
+ {
+ "epoch": 0.4791347566503079,
+ "grad_norm": 0.7412364482879639,
+ "learning_rate": 0.00018765691479037376,
+ "loss": 1.7333,
+ "step": 479
+ },
+ {
+ "epoch": 0.4801350379794317,
+ "grad_norm": 0.6909674406051636,
+ "learning_rate": 0.00018760641709209583,
+ "loss": 1.6936,
+ "step": 480
+ },
+ {
+ "epoch": 0.4811353193085555,
+ "grad_norm": 0.6478050947189331,
+ "learning_rate": 0.0001875558231302091,
+ "loss": 1.4435,
+ "step": 481
+ },
+ {
+ "epoch": 0.48213560063767935,
+ "grad_norm": 0.6662638187408447,
+ "learning_rate": 0.00018750513296030718,
+ "loss": 1.5567,
+ "step": 482
+ },
+ {
+ "epoch": 0.4831358819668032,
+ "grad_norm": 0.6973413825035095,
+ "learning_rate": 0.00018745434663808942,
+ "loss": 1.6434,
+ "step": 483
+ },
+ {
+ "epoch": 0.48413616329592696,
+ "grad_norm": 0.8381956815719604,
+ "learning_rate": 0.0001874034642193608,
+ "loss": 1.8568,
+ "step": 484
+ },
+ {
+ "epoch": 0.4851364446250508,
+ "grad_norm": 0.6522262096405029,
+ "learning_rate": 0.0001873524857600319,
+ "loss": 1.4265,
+ "step": 485
+ },
+ {
+ "epoch": 0.4861367259541746,
+ "grad_norm": 0.7018440961837769,
+ "learning_rate": 0.00018730141131611882,
+ "loss": 1.6914,
+ "step": 486
+ },
+ {
+ "epoch": 0.48713700728329845,
+ "grad_norm": 0.8237236142158508,
+ "learning_rate": 0.00018725024094374315,
+ "loss": 1.4462,
+ "step": 487
+ },
+ {
+ "epoch": 0.4881372886124222,
+ "grad_norm": 0.6507928967475891,
+ "learning_rate": 0.00018719897469913184,
+ "loss": 1.5802,
+ "step": 488
+ },
+ {
+ "epoch": 0.48913756994154606,
+ "grad_norm": 0.8120100498199463,
+ "learning_rate": 0.00018714761263861728,
+ "loss": 1.7819,
+ "step": 489
+ },
+ {
+ "epoch": 0.4901378512706699,
+ "grad_norm": 0.759722888469696,
+ "learning_rate": 0.000187096154818637,
+ "loss": 1.7481,
+ "step": 490
+ },
+ {
+ "epoch": 0.4911381325997937,
+ "grad_norm": 0.7146822214126587,
+ "learning_rate": 0.00018704460129573391,
+ "loss": 1.6217,
+ "step": 491
+ },
+ {
+ "epoch": 0.4921384139289175,
+ "grad_norm": 0.7138429284095764,
+ "learning_rate": 0.00018699295212655596,
+ "loss": 1.7242,
+ "step": 492
+ },
+ {
+ "epoch": 0.4931386952580413,
+ "grad_norm": 0.8145613670349121,
+ "learning_rate": 0.00018694120736785632,
+ "loss": 1.8759,
+ "step": 493
+ },
+ {
+ "epoch": 0.49413897658716516,
+ "grad_norm": 0.6624435186386108,
+ "learning_rate": 0.00018688936707649304,
+ "loss": 1.8632,
+ "step": 494
+ },
+ {
+ "epoch": 0.49513925791628893,
+ "grad_norm": 0.6550843119621277,
+ "learning_rate": 0.00018683743130942928,
+ "loss": 1.7645,
+ "step": 495
+ },
+ {
+ "epoch": 0.49613953924541276,
+ "grad_norm": 0.6931488513946533,
+ "learning_rate": 0.00018678540012373302,
+ "loss": 1.5899,
+ "step": 496
+ },
+ {
+ "epoch": 0.4971398205745366,
+ "grad_norm": 0.684229850769043,
+ "learning_rate": 0.00018673327357657715,
+ "loss": 1.6902,
+ "step": 497
+ },
+ {
+ "epoch": 0.4981401019036604,
+ "grad_norm": 0.7380666136741638,
+ "learning_rate": 0.0001866810517252393,
+ "loss": 1.7115,
+ "step": 498
+ },
+ {
+ "epoch": 0.4991403832327842,
+ "grad_norm": 0.6782827973365784,
+ "learning_rate": 0.00018662873462710184,
+ "loss": 1.495,
+ "step": 499
+ },
+ {
+ "epoch": 0.5001406645619081,
+ "grad_norm": 0.730248749256134,
+ "learning_rate": 0.0001865763223396518,
+ "loss": 1.5147,
+ "step": 500
+ },
+ {
+ "epoch": 0.5011409458910319,
+ "grad_norm": 0.7644149661064148,
+ "learning_rate": 0.00018652381492048083,
+ "loss": 1.7278,
+ "step": 501
+ },
+ {
+ "epoch": 0.5021412272201556,
+ "grad_norm": 0.6977668404579163,
+ "learning_rate": 0.00018647121242728506,
+ "loss": 1.5194,
+ "step": 502
+ },
+ {
+ "epoch": 0.5031415085492795,
+ "grad_norm": 0.7714502215385437,
+ "learning_rate": 0.00018641851491786512,
+ "loss": 2.0039,
+ "step": 503
+ },
+ {
+ "epoch": 0.5041417898784033,
+ "grad_norm": 0.9013757109642029,
+ "learning_rate": 0.00018636572245012606,
+ "loss": 1.8069,
+ "step": 504
+ },
+ {
+ "epoch": 0.5051420712075271,
+ "grad_norm": 0.7173192501068115,
+ "learning_rate": 0.00018631283508207725,
+ "loss": 1.841,
+ "step": 505
+ },
+ {
+ "epoch": 0.506142352536651,
+ "grad_norm": 0.7639481425285339,
+ "learning_rate": 0.00018625985287183233,
+ "loss": 1.5333,
+ "step": 506
+ },
+ {
+ "epoch": 0.5071426338657747,
+ "grad_norm": 0.8779808282852173,
+ "learning_rate": 0.00018620677587760916,
+ "loss": 1.8304,
+ "step": 507
+ },
+ {
+ "epoch": 0.5081429151948985,
+ "grad_norm": 0.8956230282783508,
+ "learning_rate": 0.00018615360415772978,
+ "loss": 1.7228,
+ "step": 508
+ },
+ {
+ "epoch": 0.5091431965240224,
+ "grad_norm": 0.8217945098876953,
+ "learning_rate": 0.00018610033777062025,
+ "loss": 1.4977,
+ "step": 509
+ },
+ {
+ "epoch": 0.5101434778531462,
+ "grad_norm": 0.7628902196884155,
+ "learning_rate": 0.0001860469767748108,
+ "loss": 1.7634,
+ "step": 510
+ },
+ {
+ "epoch": 0.5111437591822701,
+ "grad_norm": 0.635911226272583,
+ "learning_rate": 0.00018599352122893539,
+ "loss": 1.5103,
+ "step": 511
+ },
+ {
+ "epoch": 0.5121440405113938,
+ "grad_norm": 0.6439646482467651,
+ "learning_rate": 0.00018593997119173205,
+ "loss": 1.7281,
+ "step": 512
+ },
+ {
+ "epoch": 0.5131443218405176,
+ "grad_norm": 0.7753567099571228,
+ "learning_rate": 0.00018588632672204264,
+ "loss": 1.9028,
+ "step": 513
+ },
+ {
+ "epoch": 0.5141446031696415,
+ "grad_norm": 0.8296717405319214,
+ "learning_rate": 0.0001858325878788126,
+ "loss": 1.9049,
+ "step": 514
+ },
+ {
+ "epoch": 0.5151448844987653,
+ "grad_norm": 0.7379167079925537,
+ "learning_rate": 0.00018577875472109134,
+ "loss": 1.6262,
+ "step": 515
+ },
+ {
+ "epoch": 0.516145165827889,
+ "grad_norm": 0.634040355682373,
+ "learning_rate": 0.0001857248273080317,
+ "loss": 1.5416,
+ "step": 516
+ },
+ {
+ "epoch": 0.5171454471570129,
+ "grad_norm": 0.7394217252731323,
+ "learning_rate": 0.00018567080569889015,
+ "loss": 1.6035,
+ "step": 517
+ },
+ {
+ "epoch": 0.5181457284861367,
+ "grad_norm": 0.705426037311554,
+ "learning_rate": 0.00018561668995302667,
+ "loss": 1.616,
+ "step": 518
+ },
+ {
+ "epoch": 0.5191460098152605,
+ "grad_norm": 0.778021514415741,
+ "learning_rate": 0.00018556248012990468,
+ "loss": 1.5206,
+ "step": 519
+ },
+ {
+ "epoch": 0.5201462911443844,
+ "grad_norm": 0.7203211188316345,
+ "learning_rate": 0.000185508176289091,
+ "loss": 1.5369,
+ "step": 520
+ },
+ {
+ "epoch": 0.5211465724735082,
+ "grad_norm": 0.7390999794006348,
+ "learning_rate": 0.00018545377849025566,
+ "loss": 1.6438,
+ "step": 521
+ },
+ {
+ "epoch": 0.522146853802632,
+ "grad_norm": 0.6779179573059082,
+ "learning_rate": 0.0001853992867931721,
+ "loss": 1.6268,
+ "step": 522
+ },
+ {
+ "epoch": 0.5231471351317558,
+ "grad_norm": 0.6589105725288391,
+ "learning_rate": 0.00018534470125771674,
+ "loss": 1.8353,
+ "step": 523
+ },
+ {
+ "epoch": 0.5241474164608796,
+ "grad_norm": 0.692081868648529,
+ "learning_rate": 0.0001852900219438693,
+ "loss": 1.7047,
+ "step": 524
+ },
+ {
+ "epoch": 0.5251476977900035,
+ "grad_norm": 0.6639224886894226,
+ "learning_rate": 0.0001852352489117124,
+ "loss": 1.7448,
+ "step": 525
+ },
+ {
+ "epoch": 0.5261479791191273,
+ "grad_norm": 0.7168188095092773,
+ "learning_rate": 0.00018518038222143174,
+ "loss": 1.6734,
+ "step": 526
+ },
+ {
+ "epoch": 0.527148260448251,
+ "grad_norm": 0.7671873569488525,
+ "learning_rate": 0.00018512542193331583,
+ "loss": 1.9392,
+ "step": 527
+ },
+ {
+ "epoch": 0.5281485417773749,
+ "grad_norm": 0.7861583828926086,
+ "learning_rate": 0.00018507036810775615,
+ "loss": 1.5749,
+ "step": 528
+ },
+ {
+ "epoch": 0.5291488231064987,
+ "grad_norm": 0.6727952361106873,
+ "learning_rate": 0.00018501522080524688,
+ "loss": 1.7584,
+ "step": 529
+ },
+ {
+ "epoch": 0.5301491044356225,
+ "grad_norm": 0.7287748456001282,
+ "learning_rate": 0.0001849599800863849,
+ "loss": 1.783,
+ "step": 530
+ },
+ {
+ "epoch": 0.5311493857647464,
+ "grad_norm": 0.6883361339569092,
+ "learning_rate": 0.0001849046460118698,
+ "loss": 1.6104,
+ "step": 531
+ },
+ {
+ "epoch": 0.5321496670938701,
+ "grad_norm": 0.6767789125442505,
+ "learning_rate": 0.0001848492186425037,
+ "loss": 1.8416,
+ "step": 532
+ },
+ {
+ "epoch": 0.533149948422994,
+ "grad_norm": 0.7468088269233704,
+ "learning_rate": 0.0001847936980391913,
+ "loss": 1.8297,
+ "step": 533
+ },
+ {
+ "epoch": 0.5341502297521178,
+ "grad_norm": 0.7560007572174072,
+ "learning_rate": 0.00018473808426293964,
+ "loss": 1.5405,
+ "step": 534
+ },
+ {
+ "epoch": 0.5351505110812416,
+ "grad_norm": 0.6900463104248047,
+ "learning_rate": 0.00018468237737485823,
+ "loss": 1.5117,
+ "step": 535
+ },
+ {
+ "epoch": 0.5361507924103655,
+ "grad_norm": 0.8691229820251465,
+ "learning_rate": 0.00018462657743615888,
+ "loss": 1.724,
+ "step": 536
+ },
+ {
+ "epoch": 0.5371510737394892,
+ "grad_norm": 0.7081372141838074,
+ "learning_rate": 0.00018457068450815562,
+ "loss": 1.6418,
+ "step": 537
+ },
+ {
+ "epoch": 0.538151355068613,
+ "grad_norm": 0.8148525953292847,
+ "learning_rate": 0.00018451469865226464,
+ "loss": 1.8564,
+ "step": 538
+ },
+ {
+ "epoch": 0.5391516363977369,
+ "grad_norm": 0.7306076288223267,
+ "learning_rate": 0.00018445861993000436,
+ "loss": 1.4974,
+ "step": 539
+ },
+ {
+ "epoch": 0.5401519177268607,
+ "grad_norm": 0.815825343132019,
+ "learning_rate": 0.00018440244840299506,
+ "loss": 1.8965,
+ "step": 540
+ },
+ {
+ "epoch": 0.5411521990559846,
+ "grad_norm": 0.753034234046936,
+ "learning_rate": 0.0001843461841329591,
+ "loss": 2.016,
+ "step": 541
+ },
+ {
+ "epoch": 0.5421524803851083,
+ "grad_norm": 0.8658022284507751,
+ "learning_rate": 0.0001842898271817208,
+ "loss": 1.6697,
+ "step": 542
+ },
+ {
+ "epoch": 0.5431527617142321,
+ "grad_norm": 0.7143135666847229,
+ "learning_rate": 0.00018423337761120618,
+ "loss": 1.8741,
+ "step": 543
+ },
+ {
+ "epoch": 0.544153043043356,
+ "grad_norm": 0.6843370795249939,
+ "learning_rate": 0.00018417683548344318,
+ "loss": 1.763,
+ "step": 544
+ },
+ {
+ "epoch": 0.5451533243724798,
+ "grad_norm": 0.6699584126472473,
+ "learning_rate": 0.00018412020086056133,
+ "loss": 1.7126,
+ "step": 545
+ },
+ {
+ "epoch": 0.5461536057016035,
+ "grad_norm": 0.6921600699424744,
+ "learning_rate": 0.0001840634738047918,
+ "loss": 1.6697,
+ "step": 546
+ },
+ {
+ "epoch": 0.5471538870307274,
+ "grad_norm": 0.822501003742218,
+ "learning_rate": 0.0001840066543784675,
+ "loss": 1.7045,
+ "step": 547
+ },
+ {
+ "epoch": 0.5481541683598512,
+ "grad_norm": 0.7563886046409607,
+ "learning_rate": 0.00018394974264402257,
+ "loss": 1.6853,
+ "step": 548
+ },
+ {
+ "epoch": 0.549154449688975,
+ "grad_norm": 0.7408218383789062,
+ "learning_rate": 0.00018389273866399275,
+ "loss": 1.6496,
+ "step": 549
+ },
+ {
+ "epoch": 0.5501547310180989,
+ "grad_norm": 0.6454717516899109,
+ "learning_rate": 0.00018383564250101512,
+ "loss": 1.5063,
+ "step": 550
+ },
+ {
+ "epoch": 0.5511550123472226,
+ "grad_norm": 0.7033074498176575,
+ "learning_rate": 0.000183778454217828,
+ "loss": 1.6432,
+ "step": 551
+ },
+ {
+ "epoch": 0.5521552936763465,
+ "grad_norm": 0.768194854259491,
+ "learning_rate": 0.0001837211738772711,
+ "loss": 2.0594,
+ "step": 552
+ },
+ {
+ "epoch": 0.5531555750054703,
+ "grad_norm": 0.7805166244506836,
+ "learning_rate": 0.000183663801542285,
+ "loss": 1.4317,
+ "step": 553
+ },
+ {
+ "epoch": 0.5541558563345941,
+ "grad_norm": 0.6603556871414185,
+ "learning_rate": 0.00018360633727591155,
+ "loss": 1.4171,
+ "step": 554
+ },
+ {
+ "epoch": 0.555156137663718,
+ "grad_norm": 0.6996607780456543,
+ "learning_rate": 0.00018354878114129367,
+ "loss": 1.6832,
+ "step": 555
+ },
+ {
+ "epoch": 0.5561564189928417,
+ "grad_norm": 0.7861623167991638,
+ "learning_rate": 0.00018349113320167504,
+ "loss": 1.8425,
+ "step": 556
+ },
+ {
+ "epoch": 0.5571567003219655,
+ "grad_norm": 0.8387210369110107,
+ "learning_rate": 0.00018343339352040042,
+ "loss": 2.1272,
+ "step": 557
+ },
+ {
+ "epoch": 0.5581569816510894,
+ "grad_norm": 0.829555094242096,
+ "learning_rate": 0.00018337556216091517,
+ "loss": 1.4835,
+ "step": 558
+ },
+ {
+ "epoch": 0.5591572629802132,
+ "grad_norm": 0.7824863791465759,
+ "learning_rate": 0.00018331763918676556,
+ "loss": 1.8893,
+ "step": 559
+ },
+ {
+ "epoch": 0.560157544309337,
+ "grad_norm": 0.707683801651001,
+ "learning_rate": 0.00018325962466159848,
+ "loss": 1.6492,
+ "step": 560
+ },
+ {
+ "epoch": 0.5611578256384608,
+ "grad_norm": 0.775600254535675,
+ "learning_rate": 0.00018320151864916135,
+ "loss": 1.6542,
+ "step": 561
+ },
+ {
+ "epoch": 0.5621581069675846,
+ "grad_norm": 0.7602002024650574,
+ "learning_rate": 0.00018314332121330225,
+ "loss": 1.9625,
+ "step": 562
+ },
+ {
+ "epoch": 0.5631583882967085,
+ "grad_norm": 0.7535431385040283,
+ "learning_rate": 0.0001830850324179695,
+ "loss": 1.6407,
+ "step": 563
+ },
+ {
+ "epoch": 0.5641586696258323,
+ "grad_norm": 1.1884644031524658,
+ "learning_rate": 0.00018302665232721208,
+ "loss": 1.6188,
+ "step": 564
+ },
+ {
+ "epoch": 0.565158950954956,
+ "grad_norm": 0.7576595544815063,
+ "learning_rate": 0.0001829681810051791,
+ "loss": 1.7739,
+ "step": 565
+ },
+ {
+ "epoch": 0.5661592322840799,
+ "grad_norm": 0.6807442307472229,
+ "learning_rate": 0.00018290961851611995,
+ "loss": 1.6244,
+ "step": 566
+ },
+ {
+ "epoch": 0.5671595136132037,
+ "grad_norm": 0.7222456932067871,
+ "learning_rate": 0.00018285096492438424,
+ "loss": 1.7809,
+ "step": 567
+ },
+ {
+ "epoch": 0.5681597949423275,
+ "grad_norm": 0.7002213597297668,
+ "learning_rate": 0.00018279222029442163,
+ "loss": 1.5462,
+ "step": 568
+ },
+ {
+ "epoch": 0.5691600762714514,
+ "grad_norm": 0.8433569669723511,
+ "learning_rate": 0.00018273338469078186,
+ "loss": 1.5042,
+ "step": 569
+ },
+ {
+ "epoch": 0.5701603576005752,
+ "grad_norm": 0.663144588470459,
+ "learning_rate": 0.00018267445817811466,
+ "loss": 1.7133,
+ "step": 570
+ },
+ {
+ "epoch": 0.5711606389296989,
+ "grad_norm": 0.7298465967178345,
+ "learning_rate": 0.00018261544082116954,
+ "loss": 2.0201,
+ "step": 571
+ },
+ {
+ "epoch": 0.5721609202588228,
+ "grad_norm": 0.7613754868507385,
+ "learning_rate": 0.00018255633268479595,
+ "loss": 1.8065,
+ "step": 572
+ },
+ {
+ "epoch": 0.5731612015879466,
+ "grad_norm": 0.7252177596092224,
+ "learning_rate": 0.00018249713383394303,
+ "loss": 1.5715,
+ "step": 573
+ },
+ {
+ "epoch": 0.5741614829170705,
+ "grad_norm": 0.783961832523346,
+ "learning_rate": 0.0001824378443336596,
+ "loss": 1.7102,
+ "step": 574
+ },
+ {
+ "epoch": 0.5751617642461943,
+ "grad_norm": 0.8532115817070007,
+ "learning_rate": 0.00018237846424909413,
+ "loss": 1.7011,
+ "step": 575
+ },
+ {
+ "epoch": 0.576162045575318,
+ "grad_norm": 0.7841559052467346,
+ "learning_rate": 0.00018231899364549455,
+ "loss": 1.6397,
+ "step": 576
+ },
+ {
+ "epoch": 0.5771623269044419,
+ "grad_norm": 0.7118051648139954,
+ "learning_rate": 0.00018225943258820833,
+ "loss": 1.7166,
+ "step": 577
+ },
+ {
+ "epoch": 0.5781626082335657,
+ "grad_norm": 0.7298933863639832,
+ "learning_rate": 0.00018219978114268227,
+ "loss": 1.604,
+ "step": 578
+ },
+ {
+ "epoch": 0.5791628895626895,
+ "grad_norm": 0.6644678711891174,
+ "learning_rate": 0.00018214003937446253,
+ "loss": 1.7673,
+ "step": 579
+ },
+ {
+ "epoch": 0.5801631708918134,
+ "grad_norm": 0.6707085371017456,
+ "learning_rate": 0.00018208020734919455,
+ "loss": 1.662,
+ "step": 580
+ },
+ {
+ "epoch": 0.5811634522209371,
+ "grad_norm": 0.7431498765945435,
+ "learning_rate": 0.00018202028513262288,
+ "loss": 1.7757,
+ "step": 581
+ },
+ {
+ "epoch": 0.5821637335500609,
+ "grad_norm": 0.6936060190200806,
+ "learning_rate": 0.00018196027279059117,
+ "loss": 1.8464,
+ "step": 582
+ },
+ {
+ "epoch": 0.5831640148791848,
+ "grad_norm": 0.6512508988380432,
+ "learning_rate": 0.00018190017038904215,
+ "loss": 1.5537,
+ "step": 583
+ },
+ {
+ "epoch": 0.5841642962083086,
+ "grad_norm": 0.7541425228118896,
+ "learning_rate": 0.0001818399779940175,
+ "loss": 1.8552,
+ "step": 584
+ },
+ {
+ "epoch": 0.5851645775374325,
+ "grad_norm": 0.720447301864624,
+ "learning_rate": 0.0001817796956716578,
+ "loss": 1.5392,
+ "step": 585
+ },
+ {
+ "epoch": 0.5861648588665562,
+ "grad_norm": 0.7526831030845642,
+ "learning_rate": 0.00018171932348820234,
+ "loss": 1.8224,
+ "step": 586
+ },
+ {
+ "epoch": 0.58716514019568,
+ "grad_norm": 0.6906039714813232,
+ "learning_rate": 0.0001816588615099893,
+ "loss": 1.6498,
+ "step": 587
+ },
+ {
+ "epoch": 0.5881654215248039,
+ "grad_norm": 0.726737380027771,
+ "learning_rate": 0.00018159830980345548,
+ "loss": 1.5377,
+ "step": 588
+ },
+ {
+ "epoch": 0.5891657028539277,
+ "grad_norm": 0.6792006492614746,
+ "learning_rate": 0.0001815376684351362,
+ "loss": 1.8905,
+ "step": 589
+ },
+ {
+ "epoch": 0.5901659841830514,
+ "grad_norm": 0.7885284423828125,
+ "learning_rate": 0.00018147693747166534,
+ "loss": 1.709,
+ "step": 590
+ },
+ {
+ "epoch": 0.5911662655121753,
+ "grad_norm": 0.7270367741584778,
+ "learning_rate": 0.00018141611697977529,
+ "loss": 1.7973,
+ "step": 591
+ },
+ {
+ "epoch": 0.5921665468412991,
+ "grad_norm": 0.7852438688278198,
+ "learning_rate": 0.00018135520702629675,
+ "loss": 1.6312,
+ "step": 592
+ },
+ {
+ "epoch": 0.5931668281704229,
+ "grad_norm": 0.7881343960762024,
+ "learning_rate": 0.0001812942076781588,
+ "loss": 1.7581,
+ "step": 593
+ },
+ {
+ "epoch": 0.5941671094995468,
+ "grad_norm": 0.7581279277801514,
+ "learning_rate": 0.0001812331190023886,
+ "loss": 1.6811,
+ "step": 594
+ },
+ {
+ "epoch": 0.5951673908286705,
+ "grad_norm": 0.7250573039054871,
+ "learning_rate": 0.0001811719410661116,
+ "loss": 1.6835,
+ "step": 595
+ },
+ {
+ "epoch": 0.5961676721577944,
+ "grad_norm": 0.7383652329444885,
+ "learning_rate": 0.00018111067393655132,
+ "loss": 1.7804,
+ "step": 596
+ },
+ {
+ "epoch": 0.5971679534869182,
+ "grad_norm": 0.6631885170936584,
+ "learning_rate": 0.0001810493176810292,
+ "loss": 1.477,
+ "step": 597
+ },
+ {
+ "epoch": 0.598168234816042,
+ "grad_norm": 0.6705698370933533,
+ "learning_rate": 0.00018098787236696474,
+ "loss": 1.5939,
+ "step": 598
+ },
+ {
+ "epoch": 0.5991685161451659,
+ "grad_norm": 0.6646862626075745,
+ "learning_rate": 0.00018092633806187513,
+ "loss": 1.5903,
+ "step": 599
+ },
+ {
+ "epoch": 0.6001687974742896,
+ "grad_norm": 0.7267604470252991,
+ "learning_rate": 0.0001808647148333755,
+ "loss": 1.6864,
+ "step": 600
+ },
+ {
+ "epoch": 0.6011690788034134,
+ "grad_norm": 0.672102689743042,
+ "learning_rate": 0.00018080300274917862,
+ "loss": 1.73,
+ "step": 601
+ },
+ {
+ "epoch": 0.6021693601325373,
+ "grad_norm": 0.7541230320930481,
+ "learning_rate": 0.00018074120187709495,
+ "loss": 1.7824,
+ "step": 602
+ },
+ {
+ "epoch": 0.6031696414616611,
+ "grad_norm": 0.6589316129684448,
+ "learning_rate": 0.00018067931228503246,
+ "loss": 1.7007,
+ "step": 603
+ },
+ {
+ "epoch": 0.604169922790785,
+ "grad_norm": 0.7083007097244263,
+ "learning_rate": 0.00018061733404099655,
+ "loss": 1.7703,
+ "step": 604
+ },
+ {
+ "epoch": 0.6051702041199087,
+ "grad_norm": 0.6700689196586609,
+ "learning_rate": 0.00018055526721309016,
+ "loss": 1.5967,
+ "step": 605
+ },
+ {
+ "epoch": 0.6061704854490325,
+ "grad_norm": 0.6766354441642761,
+ "learning_rate": 0.0001804931118695135,
+ "loss": 1.6208,
+ "step": 606
+ },
+ {
+ "epoch": 0.6071707667781564,
+ "grad_norm": 0.8214102387428284,
+ "learning_rate": 0.00018043086807856403,
+ "loss": 1.6084,
+ "step": 607
+ },
+ {
+ "epoch": 0.6081710481072802,
+ "grad_norm": 0.736492395401001,
+ "learning_rate": 0.00018036853590863648,
+ "loss": 1.7254,
+ "step": 608
+ },
+ {
+ "epoch": 0.609171329436404,
+ "grad_norm": 0.6791033148765564,
+ "learning_rate": 0.00018030611542822257,
+ "loss": 1.5837,
+ "step": 609
+ },
+ {
+ "epoch": 0.6101716107655278,
+ "grad_norm": 0.6344060301780701,
+ "learning_rate": 0.00018024360670591114,
+ "loss": 1.5407,
+ "step": 610
+ },
+ {
+ "epoch": 0.6111718920946516,
+ "grad_norm": 0.9026575088500977,
+ "learning_rate": 0.00018018100981038798,
+ "loss": 1.6748,
+ "step": 611
+ },
+ {
+ "epoch": 0.6121721734237754,
+ "grad_norm": 0.8029866814613342,
+ "learning_rate": 0.00018011832481043576,
+ "loss": 1.7547,
+ "step": 612
+ },
+ {
+ "epoch": 0.6131724547528993,
+ "grad_norm": 0.8065117597579956,
+ "learning_rate": 0.00018005555177493394,
+ "loss": 1.8051,
+ "step": 613
+ },
+ {
+ "epoch": 0.614172736082023,
+ "grad_norm": 0.7858480215072632,
+ "learning_rate": 0.00017999269077285875,
+ "loss": 1.6728,
+ "step": 614
+ },
+ {
+ "epoch": 0.6151730174111469,
+ "grad_norm": 0.6735272407531738,
+ "learning_rate": 0.00017992974187328305,
+ "loss": 1.8585,
+ "step": 615
+ },
+ {
+ "epoch": 0.6161732987402707,
+ "grad_norm": 0.7518951892852783,
+ "learning_rate": 0.00017986670514537627,
+ "loss": 1.5429,
+ "step": 616
+ },
+ {
+ "epoch": 0.6171735800693945,
+ "grad_norm": 0.6952928900718689,
+ "learning_rate": 0.00017980358065840444,
+ "loss": 1.5982,
+ "step": 617
+ },
+ {
+ "epoch": 0.6181738613985184,
+ "grad_norm": 0.8996840119361877,
+ "learning_rate": 0.0001797403684817299,
+ "loss": 1.8164,
+ "step": 618
+ },
+ {
+ "epoch": 0.6191741427276422,
+ "grad_norm": 0.7645425200462341,
+ "learning_rate": 0.00017967706868481144,
+ "loss": 1.8373,
+ "step": 619
+ },
+ {
+ "epoch": 0.6201744240567659,
+ "grad_norm": 0.8479064106941223,
+ "learning_rate": 0.00017961368133720407,
+ "loss": 1.6483,
+ "step": 620
+ },
+ {
+ "epoch": 0.6211747053858898,
+ "grad_norm": 0.7806827425956726,
+ "learning_rate": 0.000179550206508559,
+ "loss": 1.78,
+ "step": 621
+ },
+ {
+ "epoch": 0.6221749867150136,
+ "grad_norm": 0.6476775407791138,
+ "learning_rate": 0.00017948664426862364,
+ "loss": 1.7712,
+ "step": 622
+ },
+ {
+ "epoch": 0.6231752680441374,
+ "grad_norm": 0.8421279788017273,
+ "learning_rate": 0.00017942299468724134,
+ "loss": 1.7753,
+ "step": 623
+ },
+ {
+ "epoch": 0.6241755493732613,
+ "grad_norm": 0.6706071496009827,
+ "learning_rate": 0.0001793592578343515,
+ "loss": 1.4093,
+ "step": 624
+ },
+ {
+ "epoch": 0.625175830702385,
+ "grad_norm": 0.8224231004714966,
+ "learning_rate": 0.0001792954337799894,
+ "loss": 1.7343,
+ "step": 625
+ },
+ {
+ "epoch": 0.6261761120315089,
+ "grad_norm": 0.8398690819740295,
+ "learning_rate": 0.00017923152259428612,
+ "loss": 1.8017,
+ "step": 626
+ },
+ {
+ "epoch": 0.6271763933606327,
+ "grad_norm": 0.6664738059043884,
+ "learning_rate": 0.00017916752434746856,
+ "loss": 1.6023,
+ "step": 627
+ },
+ {
+ "epoch": 0.6281766746897565,
+ "grad_norm": 0.9246477484703064,
+ "learning_rate": 0.0001791034391098591,
+ "loss": 1.7862,
+ "step": 628
+ },
+ {
+ "epoch": 0.6291769560188804,
+ "grad_norm": 0.797835111618042,
+ "learning_rate": 0.00017903926695187595,
+ "loss": 1.6059,
+ "step": 629
+ },
+ {
+ "epoch": 0.6301772373480041,
+ "grad_norm": 0.613727331161499,
+ "learning_rate": 0.0001789750079440326,
+ "loss": 1.5086,
+ "step": 630
+ },
+ {
+ "epoch": 0.6311775186771279,
+ "grad_norm": 0.7127765417098999,
+ "learning_rate": 0.00017891066215693817,
+ "loss": 1.5985,
+ "step": 631
+ },
+ {
+ "epoch": 0.6321778000062518,
+ "grad_norm": 0.6923073530197144,
+ "learning_rate": 0.00017884622966129695,
+ "loss": 1.5537,
+ "step": 632
+ },
+ {
+ "epoch": 0.6331780813353756,
+ "grad_norm": 0.7015733122825623,
+ "learning_rate": 0.00017878171052790868,
+ "loss": 1.7782,
+ "step": 633
+ },
+ {
+ "epoch": 0.6341783626644993,
+ "grad_norm": 0.6932784914970398,
+ "learning_rate": 0.00017871710482766817,
+ "loss": 1.4294,
+ "step": 634
+ },
+ {
+ "epoch": 0.6351786439936232,
+ "grad_norm": 0.7054254412651062,
+ "learning_rate": 0.00017865241263156546,
+ "loss": 1.7227,
+ "step": 635
+ },
+ {
+ "epoch": 0.636178925322747,
+ "grad_norm": 0.6994242072105408,
+ "learning_rate": 0.0001785876340106855,
+ "loss": 1.5998,
+ "step": 636
+ },
+ {
+ "epoch": 0.6371792066518709,
+ "grad_norm": 0.81461501121521,
+ "learning_rate": 0.0001785227690362083,
+ "loss": 1.7212,
+ "step": 637
+ },
+ {
+ "epoch": 0.6381794879809947,
+ "grad_norm": 0.943434476852417,
+ "learning_rate": 0.00017845781777940878,
+ "loss": 1.5926,
+ "step": 638
+ },
+ {
+ "epoch": 0.6391797693101184,
+ "grad_norm": 0.8455945253372192,
+ "learning_rate": 0.00017839278031165658,
+ "loss": 1.8511,
+ "step": 639
+ },
+ {
+ "epoch": 0.6401800506392423,
+ "grad_norm": 0.9348243474960327,
+ "learning_rate": 0.00017832765670441612,
+ "loss": 1.6293,
+ "step": 640
+ },
+ {
+ "epoch": 0.6411803319683661,
+ "grad_norm": 0.746127724647522,
+ "learning_rate": 0.0001782624470292465,
+ "loss": 1.4903,
+ "step": 641
+ },
+ {
+ "epoch": 0.6421806132974899,
+ "grad_norm": 0.6215783357620239,
+ "learning_rate": 0.0001781971513578013,
+ "loss": 1.7806,
+ "step": 642
+ },
+ {
+ "epoch": 0.6431808946266138,
+ "grad_norm": 0.7447994947433472,
+ "learning_rate": 0.00017813176976182873,
+ "loss": 1.7475,
+ "step": 643
+ },
+ {
+ "epoch": 0.6441811759557375,
+ "grad_norm": 0.6916540265083313,
+ "learning_rate": 0.00017806630231317127,
+ "loss": 1.6401,
+ "step": 644
+ },
+ {
+ "epoch": 0.6451814572848613,
+ "grad_norm": 0.7208524942398071,
+ "learning_rate": 0.00017800074908376584,
+ "loss": 1.7524,
+ "step": 645
+ },
+ {
+ "epoch": 0.6461817386139852,
+ "grad_norm": 0.7548331618309021,
+ "learning_rate": 0.00017793511014564358,
+ "loss": 1.5644,
+ "step": 646
+ },
+ {
+ "epoch": 0.647182019943109,
+ "grad_norm": 0.7919667959213257,
+ "learning_rate": 0.00017786938557092983,
+ "loss": 1.6758,
+ "step": 647
+ },
+ {
+ "epoch": 0.6481823012722329,
+ "grad_norm": 0.700618326663971,
+ "learning_rate": 0.00017780357543184397,
+ "loss": 1.5213,
+ "step": 648
+ },
+ {
+ "epoch": 0.6491825826013566,
+ "grad_norm": 0.6646535992622375,
+ "learning_rate": 0.00017773767980069945,
+ "loss": 1.6487,
+ "step": 649
+ },
+ {
+ "epoch": 0.6501828639304804,
+ "grad_norm": 0.6486669182777405,
+ "learning_rate": 0.0001776716987499037,
+ "loss": 1.6556,
+ "step": 650
+ },
+ {
+ "epoch": 0.6511831452596043,
+ "grad_norm": 0.657747745513916,
+ "learning_rate": 0.0001776056323519579,
+ "loss": 1.5943,
+ "step": 651
+ },
+ {
+ "epoch": 0.6521834265887281,
+ "grad_norm": 0.7777379751205444,
+ "learning_rate": 0.00017753948067945712,
+ "loss": 1.6069,
+ "step": 652
+ },
+ {
+ "epoch": 0.6531837079178519,
+ "grad_norm": 0.772153913974762,
+ "learning_rate": 0.00017747324380509006,
+ "loss": 1.7065,
+ "step": 653
+ },
+ {
+ "epoch": 0.6541839892469757,
+ "grad_norm": 0.6984367966651917,
+ "learning_rate": 0.00017740692180163908,
+ "loss": 1.7122,
+ "step": 654
+ },
+ {
+ "epoch": 0.6551842705760995,
+ "grad_norm": 0.8033855557441711,
+ "learning_rate": 0.00017734051474198003,
+ "loss": 1.6095,
+ "step": 655
+ },
+ {
+ "epoch": 0.6561845519052233,
+ "grad_norm": 0.7568691372871399,
+ "learning_rate": 0.0001772740226990823,
+ "loss": 1.6783,
+ "step": 656
+ },
+ {
+ "epoch": 0.6571848332343472,
+ "grad_norm": 0.7288162708282471,
+ "learning_rate": 0.00017720744574600863,
+ "loss": 1.695,
+ "step": 657
+ },
+ {
+ "epoch": 0.658185114563471,
+ "grad_norm": 0.6898120045661926,
+ "learning_rate": 0.00017714078395591502,
+ "loss": 1.6539,
+ "step": 658
+ },
+ {
+ "epoch": 0.6591853958925948,
+ "grad_norm": 0.6977367997169495,
+ "learning_rate": 0.00017707403740205071,
+ "loss": 1.4558,
+ "step": 659
+ },
+ {
+ "epoch": 0.6601856772217186,
+ "grad_norm": 0.6594682335853577,
+ "learning_rate": 0.00017700720615775812,
+ "loss": 1.56,
+ "step": 660
+ },
+ {
+ "epoch": 0.6611859585508424,
+ "grad_norm": 0.6146736741065979,
+ "learning_rate": 0.0001769402902964727,
+ "loss": 1.7014,
+ "step": 661
+ },
+ {
+ "epoch": 0.6621862398799663,
+ "grad_norm": 0.7182234525680542,
+ "learning_rate": 0.00017687328989172288,
+ "loss": 1.5655,
+ "step": 662
+ },
+ {
+ "epoch": 0.66318652120909,
+ "grad_norm": 0.6940692067146301,
+ "learning_rate": 0.00017680620501712996,
+ "loss": 1.6177,
+ "step": 663
+ },
+ {
+ "epoch": 0.6641868025382138,
+ "grad_norm": 0.7672961950302124,
+ "learning_rate": 0.00017673903574640814,
+ "loss": 1.559,
+ "step": 664
+ },
+ {
+ "epoch": 0.6651870838673377,
+ "grad_norm": 0.654500424861908,
+ "learning_rate": 0.00017667178215336423,
+ "loss": 1.5024,
+ "step": 665
+ },
+ {
+ "epoch": 0.6661873651964615,
+ "grad_norm": 0.8137261867523193,
+ "learning_rate": 0.0001766044443118978,
+ "loss": 1.7865,
+ "step": 666
+ },
+ {
+ "epoch": 0.6671876465255854,
+ "grad_norm": 0.806624710559845,
+ "learning_rate": 0.000176537022296001,
+ "loss": 1.4944,
+ "step": 667
+ },
+ {
+ "epoch": 0.6681879278547092,
+ "grad_norm": 0.7952747941017151,
+ "learning_rate": 0.00017646951617975837,
+ "loss": 1.5371,
+ "step": 668
+ },
+ {
+ "epoch": 0.6691882091838329,
+ "grad_norm": 0.6380738615989685,
+ "learning_rate": 0.00017640192603734692,
+ "loss": 1.3117,
+ "step": 669
+ },
+ {
+ "epoch": 0.6701884905129568,
+ "grad_norm": 0.6559002995491028,
+ "learning_rate": 0.00017633425194303606,
+ "loss": 1.3662,
+ "step": 670
+ },
+ {
+ "epoch": 0.6711887718420806,
+ "grad_norm": 0.715826153755188,
+ "learning_rate": 0.00017626649397118734,
+ "loss": 1.7271,
+ "step": 671
+ },
+ {
+ "epoch": 0.6721890531712044,
+ "grad_norm": 0.6719872355461121,
+ "learning_rate": 0.00017619865219625452,
+ "loss": 1.747,
+ "step": 672
+ },
+ {
+ "epoch": 0.6731893345003283,
+ "grad_norm": 0.6901715397834778,
+ "learning_rate": 0.00017613072669278343,
+ "loss": 1.6438,
+ "step": 673
+ },
+ {
+ "epoch": 0.674189615829452,
+ "grad_norm": 0.6601479649543762,
+ "learning_rate": 0.00017606271753541192,
+ "loss": 1.8191,
+ "step": 674
+ },
+ {
+ "epoch": 0.6751898971585758,
+ "grad_norm": 0.8059187531471252,
+ "learning_rate": 0.00017599462479886974,
+ "loss": 1.6946,
+ "step": 675
+ },
+ {
+ "epoch": 0.6761901784876997,
+ "grad_norm": 0.6966856718063354,
+ "learning_rate": 0.00017592644855797854,
+ "loss": 1.5551,
+ "step": 676
+ },
+ {
+ "epoch": 0.6771904598168235,
+ "grad_norm": 0.7306144833564758,
+ "learning_rate": 0.00017585818888765168,
+ "loss": 1.5429,
+ "step": 677
+ },
+ {
+ "epoch": 0.6781907411459474,
+ "grad_norm": 0.572907030582428,
+ "learning_rate": 0.0001757898458628941,
+ "loss": 1.4437,
+ "step": 678
+ },
+ {
+ "epoch": 0.6791910224750711,
+ "grad_norm": 0.6807466149330139,
+ "learning_rate": 0.00017572141955880252,
+ "loss": 1.6307,
+ "step": 679
+ },
+ {
+ "epoch": 0.6801913038041949,
+ "grad_norm": 0.7529204487800598,
+ "learning_rate": 0.00017565291005056504,
+ "loss": 1.631,
+ "step": 680
+ },
+ {
+ "epoch": 0.6811915851333188,
+ "grad_norm": 0.6292940378189087,
+ "learning_rate": 0.00017558431741346122,
+ "loss": 1.7512,
+ "step": 681
+ },
+ {
+ "epoch": 0.6821918664624426,
+ "grad_norm": 0.7981480956077576,
+ "learning_rate": 0.00017551564172286197,
+ "loss": 1.7704,
+ "step": 682
+ },
+ {
+ "epoch": 0.6831921477915663,
+ "grad_norm": 0.7816259860992432,
+ "learning_rate": 0.00017544688305422943,
+ "loss": 1.4954,
+ "step": 683
+ },
+ {
+ "epoch": 0.6841924291206902,
+ "grad_norm": 0.6866456866264343,
+ "learning_rate": 0.00017537804148311695,
+ "loss": 1.7986,
+ "step": 684
+ },
+ {
+ "epoch": 0.685192710449814,
+ "grad_norm": 0.7499064803123474,
+ "learning_rate": 0.00017530911708516902,
+ "loss": 1.6472,
+ "step": 685
+ },
+ {
+ "epoch": 0.6861929917789378,
+ "grad_norm": 0.5923457145690918,
+ "learning_rate": 0.00017524010993612098,
+ "loss": 1.4866,
+ "step": 686
+ },
+ {
+ "epoch": 0.6871932731080617,
+ "grad_norm": 0.6991822719573975,
+ "learning_rate": 0.00017517102011179933,
+ "loss": 1.605,
+ "step": 687
+ },
+ {
+ "epoch": 0.6881935544371854,
+ "grad_norm": 0.7880247235298157,
+ "learning_rate": 0.0001751018476881212,
+ "loss": 1.641,
+ "step": 688
+ },
+ {
+ "epoch": 0.6891938357663093,
+ "grad_norm": 0.7848097085952759,
+ "learning_rate": 0.00017503259274109464,
+ "loss": 1.7505,
+ "step": 689
+ },
+ {
+ "epoch": 0.6901941170954331,
+ "grad_norm": 0.693678081035614,
+ "learning_rate": 0.00017496325534681825,
+ "loss": 1.6565,
+ "step": 690
+ },
+ {
+ "epoch": 0.6911943984245569,
+ "grad_norm": 0.8232877254486084,
+ "learning_rate": 0.00017489383558148136,
+ "loss": 1.7664,
+ "step": 691
+ },
+ {
+ "epoch": 0.6921946797536808,
+ "grad_norm": 0.7834855914115906,
+ "learning_rate": 0.00017482433352136365,
+ "loss": 1.4381,
+ "step": 692
+ },
+ {
+ "epoch": 0.6931949610828045,
+ "grad_norm": 0.6186713576316833,
+ "learning_rate": 0.00017475474924283536,
+ "loss": 1.6482,
+ "step": 693
+ },
+ {
+ "epoch": 0.6941952424119283,
+ "grad_norm": 0.7511133551597595,
+ "learning_rate": 0.00017468508282235704,
+ "loss": 1.6186,
+ "step": 694
+ },
+ {
+ "epoch": 0.6951955237410522,
+ "grad_norm": 0.8017745614051819,
+ "learning_rate": 0.00017461533433647946,
+ "loss": 1.6597,
+ "step": 695
+ },
+ {
+ "epoch": 0.696195805070176,
+ "grad_norm": 0.8190794587135315,
+ "learning_rate": 0.00017454550386184362,
+ "loss": 1.6602,
+ "step": 696
+ },
+ {
+ "epoch": 0.6971960863992998,
+ "grad_norm": 0.7479042410850525,
+ "learning_rate": 0.00017447559147518055,
+ "loss": 1.77,
+ "step": 697
+ },
+ {
+ "epoch": 0.6981963677284236,
+ "grad_norm": 0.7239962816238403,
+ "learning_rate": 0.00017440559725331135,
+ "loss": 1.5838,
+ "step": 698
+ },
+ {
+ "epoch": 0.6991966490575474,
+ "grad_norm": 0.7252762317657471,
+ "learning_rate": 0.000174335521273147,
+ "loss": 1.5462,
+ "step": 699
+ },
+ {
+ "epoch": 0.7001969303866713,
+ "grad_norm": 0.9383960962295532,
+ "learning_rate": 0.00017426536361168834,
+ "loss": 1.5104,
+ "step": 700
+ },
+ {
+ "epoch": 0.7011972117157951,
+ "grad_norm": 0.6944159269332886,
+ "learning_rate": 0.00017419512434602594,
+ "loss": 1.6382,
+ "step": 701
+ },
+ {
+ "epoch": 0.7021974930449189,
+ "grad_norm": 0.6809273362159729,
+ "learning_rate": 0.00017412480355334005,
+ "loss": 1.725,
+ "step": 702
+ },
+ {
+ "epoch": 0.7031977743740427,
+ "grad_norm": 0.7521125674247742,
+ "learning_rate": 0.00017405440131090048,
+ "loss": 1.8499,
+ "step": 703
+ },
+ {
+ "epoch": 0.7041980557031665,
+ "grad_norm": 0.6854100227355957,
+ "learning_rate": 0.00017398391769606658,
+ "loss": 1.6648,
+ "step": 704
+ },
+ {
+ "epoch": 0.7051983370322903,
+ "grad_norm": 0.7382327318191528,
+ "learning_rate": 0.00017391335278628712,
+ "loss": 1.5806,
+ "step": 705
+ },
+ {
+ "epoch": 0.7061986183614142,
+ "grad_norm": 0.7387582063674927,
+ "learning_rate": 0.00017384270665910014,
+ "loss": 1.5563,
+ "step": 706
+ },
+ {
+ "epoch": 0.707198899690538,
+ "grad_norm": 0.7698972821235657,
+ "learning_rate": 0.000173771979392133,
+ "loss": 1.6626,
+ "step": 707
+ },
+ {
+ "epoch": 0.7081991810196617,
+ "grad_norm": 0.7639899849891663,
+ "learning_rate": 0.00017370117106310214,
+ "loss": 1.6725,
+ "step": 708
+ },
+ {
+ "epoch": 0.7091994623487856,
+ "grad_norm": 0.6684393286705017,
+ "learning_rate": 0.0001736302817498131,
+ "loss": 1.64,
+ "step": 709
+ },
+ {
+ "epoch": 0.7101997436779094,
+ "grad_norm": 0.6329504251480103,
+ "learning_rate": 0.00017355931153016044,
+ "loss": 1.4472,
+ "step": 710
+ },
+ {
+ "epoch": 0.7112000250070333,
+ "grad_norm": 0.8133587837219238,
+ "learning_rate": 0.0001734882604821276,
+ "loss": 1.7971,
+ "step": 711
+ },
+ {
+ "epoch": 0.712200306336157,
+ "grad_norm": 0.6524143218994141,
+ "learning_rate": 0.0001734171286837868,
+ "loss": 1.5366,
+ "step": 712
+ },
+ {
+ "epoch": 0.7132005876652808,
+ "grad_norm": 0.6714311242103577,
+ "learning_rate": 0.00017334591621329906,
+ "loss": 1.841,
+ "step": 713
+ },
+ {
+ "epoch": 0.7142008689944047,
+ "grad_norm": 0.6690782904624939,
+ "learning_rate": 0.00017327462314891402,
+ "loss": 1.623,
+ "step": 714
+ },
+ {
+ "epoch": 0.7152011503235285,
+ "grad_norm": 0.650442361831665,
+ "learning_rate": 0.00017320324956896977,
+ "loss": 1.6124,
+ "step": 715
+ },
+ {
+ "epoch": 0.7162014316526523,
+ "grad_norm": 0.7075713276863098,
+ "learning_rate": 0.00017313179555189306,
+ "loss": 1.5154,
+ "step": 716
+ },
+ {
+ "epoch": 0.7172017129817762,
+ "grad_norm": 0.729060173034668,
+ "learning_rate": 0.00017306026117619889,
+ "loss": 1.7072,
+ "step": 717
+ },
+ {
+ "epoch": 0.7182019943108999,
+ "grad_norm": 0.8547433614730835,
+ "learning_rate": 0.0001729886465204906,
+ "loss": 1.6237,
+ "step": 718
+ },
+ {
+ "epoch": 0.7192022756400237,
+ "grad_norm": 0.6729336380958557,
+ "learning_rate": 0.0001729169516634598,
+ "loss": 1.7769,
+ "step": 719
+ },
+ {
+ "epoch": 0.7202025569691476,
+ "grad_norm": 0.7437167167663574,
+ "learning_rate": 0.0001728451766838861,
+ "loss": 1.5056,
+ "step": 720
+ },
+ {
+ "epoch": 0.7212028382982714,
+ "grad_norm": 0.6573147177696228,
+ "learning_rate": 0.00017277332166063726,
+ "loss": 1.7694,
+ "step": 721
+ },
+ {
+ "epoch": 0.7222031196273953,
+ "grad_norm": 0.6767126321792603,
+ "learning_rate": 0.00017270138667266894,
+ "loss": 1.6014,
+ "step": 722
+ },
+ {
+ "epoch": 0.723203400956519,
+ "grad_norm": 0.7488179206848145,
+ "learning_rate": 0.00017262937179902472,
+ "loss": 1.573,
+ "step": 723
+ },
+ {
+ "epoch": 0.7242036822856428,
+ "grad_norm": 0.6491002440452576,
+ "learning_rate": 0.00017255727711883588,
+ "loss": 1.6705,
+ "step": 724
+ },
+ {
+ "epoch": 0.7252039636147667,
+ "grad_norm": 0.764090359210968,
+ "learning_rate": 0.00017248510271132144,
+ "loss": 1.6761,
+ "step": 725
+ },
+ {
+ "epoch": 0.7262042449438905,
+ "grad_norm": 0.7116997838020325,
+ "learning_rate": 0.00017241284865578802,
+ "loss": 1.7435,
+ "step": 726
+ },
+ {
+ "epoch": 0.7272045262730142,
+ "grad_norm": 0.6367645859718323,
+ "learning_rate": 0.00017234051503162978,
+ "loss": 1.7061,
+ "step": 727
+ },
+ {
+ "epoch": 0.7282048076021381,
+ "grad_norm": 0.7232155203819275,
+ "learning_rate": 0.0001722681019183283,
+ "loss": 1.8142,
+ "step": 728
+ },
+ {
+ "epoch": 0.7292050889312619,
+ "grad_norm": 0.7533649802207947,
+ "learning_rate": 0.00017219560939545246,
+ "loss": 1.8202,
+ "step": 729
+ },
+ {
+ "epoch": 0.7302053702603858,
+ "grad_norm": 0.6923018097877502,
+ "learning_rate": 0.00017212303754265843,
+ "loss": 1.4925,
+ "step": 730
+ },
+ {
+ "epoch": 0.7312056515895096,
+ "grad_norm": 0.7326932549476624,
+ "learning_rate": 0.0001720503864396896,
+ "loss": 1.5192,
+ "step": 731
+ },
+ {
+ "epoch": 0.7322059329186333,
+ "grad_norm": 0.7220762968063354,
+ "learning_rate": 0.00017197765616637636,
+ "loss": 1.7601,
+ "step": 732
+ },
+ {
+ "epoch": 0.7332062142477572,
+ "grad_norm": 0.605725884437561,
+ "learning_rate": 0.0001719048468026361,
+ "loss": 1.6309,
+ "step": 733
+ },
+ {
+ "epoch": 0.734206495576881,
+ "grad_norm": 0.6728388667106628,
+ "learning_rate": 0.00017183195842847322,
+ "loss": 1.5993,
+ "step": 734
+ },
+ {
+ "epoch": 0.7352067769060048,
+ "grad_norm": 0.7035244703292847,
+ "learning_rate": 0.0001717589911239788,
+ "loss": 1.6031,
+ "step": 735
+ },
+ {
+ "epoch": 0.7362070582351287,
+ "grad_norm": 0.7473010420799255,
+ "learning_rate": 0.00017168594496933074,
+ "loss": 1.5833,
+ "step": 736
+ },
+ {
+ "epoch": 0.7372073395642524,
+ "grad_norm": 0.6310701370239258,
+ "learning_rate": 0.00017161282004479351,
+ "loss": 1.4328,
+ "step": 737
+ },
+ {
+ "epoch": 0.7382076208933762,
+ "grad_norm": 0.6805673837661743,
+ "learning_rate": 0.0001715396164307182,
+ "loss": 1.5429,
+ "step": 738
+ },
+ {
+ "epoch": 0.7392079022225001,
+ "grad_norm": 0.747222900390625,
+ "learning_rate": 0.0001714663342075424,
+ "loss": 1.7696,
+ "step": 739
+ },
+ {
+ "epoch": 0.7402081835516239,
+ "grad_norm": 0.8214403390884399,
+ "learning_rate": 0.00017139297345578994,
+ "loss": 1.5997,
+ "step": 740
+ },
+ {
+ "epoch": 0.7412084648807478,
+ "grad_norm": 0.6722521781921387,
+ "learning_rate": 0.00017131953425607104,
+ "loss": 1.5287,
+ "step": 741
+ },
+ {
+ "epoch": 0.7422087462098715,
+ "grad_norm": 0.6937971115112305,
+ "learning_rate": 0.00017124601668908212,
+ "loss": 1.7263,
+ "step": 742
+ },
+ {
+ "epoch": 0.7432090275389953,
+ "grad_norm": 0.7590844631195068,
+ "learning_rate": 0.00017117242083560568,
+ "loss": 1.7263,
+ "step": 743
+ },
+ {
+ "epoch": 0.7442093088681192,
+ "grad_norm": 0.7913306355476379,
+ "learning_rate": 0.00017109874677651024,
+ "loss": 1.7646,
+ "step": 744
+ },
+ {
+ "epoch": 0.745209590197243,
+ "grad_norm": 0.7123669385910034,
+ "learning_rate": 0.0001710249945927503,
+ "loss": 1.6768,
+ "step": 745
+ },
+ {
+ "epoch": 0.7462098715263668,
+ "grad_norm": 0.8426288366317749,
+ "learning_rate": 0.00017095116436536612,
+ "loss": 1.8496,
+ "step": 746
+ },
+ {
+ "epoch": 0.7472101528554906,
+ "grad_norm": 0.6152015328407288,
+ "learning_rate": 0.00017087725617548385,
+ "loss": 1.4527,
+ "step": 747
+ },
+ {
+ "epoch": 0.7482104341846144,
+ "grad_norm": 0.8348223567008972,
+ "learning_rate": 0.00017080327010431513,
+ "loss": 1.4847,
+ "step": 748
+ },
+ {
+ "epoch": 0.7492107155137382,
+ "grad_norm": 0.7883800268173218,
+ "learning_rate": 0.00017072920623315734,
+ "loss": 1.5941,
+ "step": 749
+ },
+ {
+ "epoch": 0.7502109968428621,
+ "grad_norm": 0.6957768201828003,
+ "learning_rate": 0.00017065506464339326,
+ "loss": 1.7543,
+ "step": 750
+ },
+ {
+ "epoch": 0.7512112781719859,
+ "grad_norm": 0.5898700952529907,
+ "learning_rate": 0.00017058084541649106,
+ "loss": 1.7859,
+ "step": 751
+ },
+ {
+ "epoch": 0.7522115595011097,
+ "grad_norm": 0.6882239580154419,
+ "learning_rate": 0.00017050654863400429,
+ "loss": 1.3233,
+ "step": 752
+ },
+ {
+ "epoch": 0.7532118408302335,
+ "grad_norm": 0.7327316999435425,
+ "learning_rate": 0.00017043217437757164,
+ "loss": 1.5067,
+ "step": 753
+ },
+ {
+ "epoch": 0.7542121221593573,
+ "grad_norm": 0.9257964491844177,
+ "learning_rate": 0.00017035772272891702,
+ "loss": 1.503,
+ "step": 754
+ },
+ {
+ "epoch": 0.7552124034884812,
+ "grad_norm": 0.7924116253852844,
+ "learning_rate": 0.00017028319376984928,
+ "loss": 1.8975,
+ "step": 755
+ },
+ {
+ "epoch": 0.756212684817605,
+ "grad_norm": 0.6651099920272827,
+ "learning_rate": 0.00017020858758226229,
+ "loss": 1.649,
+ "step": 756
+ },
+ {
+ "epoch": 0.7572129661467287,
+ "grad_norm": 0.7257362604141235,
+ "learning_rate": 0.0001701339042481347,
+ "loss": 1.6919,
+ "step": 757
+ },
+ {
+ "epoch": 0.7582132474758526,
+ "grad_norm": 0.8733739852905273,
+ "learning_rate": 0.00017005914384953007,
+ "loss": 1.5929,
+ "step": 758
+ },
+ {
+ "epoch": 0.7592135288049764,
+ "grad_norm": 0.6347383856773376,
+ "learning_rate": 0.00016998430646859654,
+ "loss": 1.3341,
+ "step": 759
+ },
+ {
+ "epoch": 0.7602138101341002,
+ "grad_norm": 0.6915012001991272,
+ "learning_rate": 0.00016990939218756683,
+ "loss": 1.4971,
+ "step": 760
+ },
+ {
+ "epoch": 0.761214091463224,
+ "grad_norm": 0.7862069606781006,
+ "learning_rate": 0.0001698344010887582,
+ "loss": 1.7468,
+ "step": 761
+ },
+ {
+ "epoch": 0.7622143727923478,
+ "grad_norm": 0.7318029403686523,
+ "learning_rate": 0.0001697593332545723,
+ "loss": 1.8143,
+ "step": 762
+ },
+ {
+ "epoch": 0.7632146541214717,
+ "grad_norm": 0.6758155226707458,
+ "learning_rate": 0.0001696841887674951,
+ "loss": 1.6652,
+ "step": 763
+ },
+ {
+ "epoch": 0.7642149354505955,
+ "grad_norm": 0.6853237748146057,
+ "learning_rate": 0.00016960896771009684,
+ "loss": 1.5176,
+ "step": 764
+ },
+ {
+ "epoch": 0.7652152167797193,
+ "grad_norm": 0.9686934351921082,
+ "learning_rate": 0.00016953367016503182,
+ "loss": 1.5366,
+ "step": 765
+ },
+ {
+ "epoch": 0.7662154981088432,
+ "grad_norm": 0.7232028841972351,
+ "learning_rate": 0.00016945829621503838,
+ "loss": 1.6932,
+ "step": 766
+ },
+ {
+ "epoch": 0.7672157794379669,
+ "grad_norm": 0.6606596112251282,
+ "learning_rate": 0.00016938284594293897,
+ "loss": 1.7051,
+ "step": 767
+ },
+ {
+ "epoch": 0.7682160607670907,
+ "grad_norm": 0.6337714195251465,
+ "learning_rate": 0.00016930731943163972,
+ "loss": 1.6505,
+ "step": 768
+ },
+ {
+ "epoch": 0.7692163420962146,
+ "grad_norm": 0.6292264461517334,
+ "learning_rate": 0.00016923171676413063,
+ "loss": 1.7207,
+ "step": 769
+ },
+ {
+ "epoch": 0.7702166234253384,
+ "grad_norm": 0.7183407545089722,
+ "learning_rate": 0.00016915603802348535,
+ "loss": 1.7025,
+ "step": 770
+ },
+ {
+ "epoch": 0.7712169047544621,
+ "grad_norm": 0.805107593536377,
+ "learning_rate": 0.00016908028329286112,
+ "loss": 1.592,
+ "step": 771
+ },
+ {
+ "epoch": 0.772217186083586,
+ "grad_norm": 0.725777804851532,
+ "learning_rate": 0.0001690044526554987,
+ "loss": 1.6714,
+ "step": 772
+ },
+ {
+ "epoch": 0.7732174674127098,
+ "grad_norm": 0.6801775097846985,
+ "learning_rate": 0.00016892854619472223,
+ "loss": 1.5047,
+ "step": 773
+ },
+ {
+ "epoch": 0.7742177487418337,
+ "grad_norm": 0.7701449990272522,
+ "learning_rate": 0.00016885256399393924,
+ "loss": 1.5506,
+ "step": 774
+ },
+ {
+ "epoch": 0.7752180300709575,
+ "grad_norm": 0.6954746842384338,
+ "learning_rate": 0.00016877650613664034,
+ "loss": 1.4859,
+ "step": 775
+ },
+ {
+ "epoch": 0.7762183114000812,
+ "grad_norm": 0.7431885004043579,
+ "learning_rate": 0.00016870037270639942,
+ "loss": 1.6087,
+ "step": 776
+ },
+ {
+ "epoch": 0.7772185927292051,
+ "grad_norm": 0.687329113483429,
+ "learning_rate": 0.0001686241637868734,
+ "loss": 1.7038,
+ "step": 777
+ },
+ {
+ "epoch": 0.7782188740583289,
+ "grad_norm": 0.6656787395477295,
+ "learning_rate": 0.00016854787946180198,
+ "loss": 1.5691,
+ "step": 778
+ },
+ {
+ "epoch": 0.7792191553874527,
+ "grad_norm": 0.7476064562797546,
+ "learning_rate": 0.00016847151981500789,
+ "loss": 1.4972,
+ "step": 779
+ },
+ {
+ "epoch": 0.7802194367165766,
+ "grad_norm": 0.7320332527160645,
+ "learning_rate": 0.00016839508493039657,
+ "loss": 1.7326,
+ "step": 780
+ },
+ {
+ "epoch": 0.7812197180457003,
+ "grad_norm": 0.6432293057441711,
+ "learning_rate": 0.00016831857489195618,
+ "loss": 1.542,
+ "step": 781
+ },
+ {
+ "epoch": 0.7822199993748241,
+ "grad_norm": 0.6751729846000671,
+ "learning_rate": 0.00016824198978375736,
+ "loss": 1.6864,
+ "step": 782
+ },
+ {
+ "epoch": 0.783220280703948,
+ "grad_norm": 0.770193338394165,
+ "learning_rate": 0.00016816532968995328,
+ "loss": 1.5318,
+ "step": 783
+ },
+ {
+ "epoch": 0.7842205620330718,
+ "grad_norm": 0.6820619106292725,
+ "learning_rate": 0.0001680885946947796,
+ "loss": 1.6004,
+ "step": 784
+ },
+ {
+ "epoch": 0.7852208433621957,
+ "grad_norm": 0.9120951294898987,
+ "learning_rate": 0.00016801178488255413,
+ "loss": 1.6506,
+ "step": 785
+ },
+ {
+ "epoch": 0.7862211246913194,
+ "grad_norm": 0.7819542288780212,
+ "learning_rate": 0.00016793490033767698,
+ "loss": 1.5292,
+ "step": 786
+ },
+ {
+ "epoch": 0.7872214060204432,
+ "grad_norm": 0.6647278666496277,
+ "learning_rate": 0.00016785794114463037,
+ "loss": 1.5941,
+ "step": 787
+ },
+ {
+ "epoch": 0.7882216873495671,
+ "grad_norm": 0.6874713897705078,
+ "learning_rate": 0.00016778090738797853,
+ "loss": 1.5543,
+ "step": 788
+ },
+ {
+ "epoch": 0.7892219686786909,
+ "grad_norm": 0.7759424448013306,
+ "learning_rate": 0.00016770379915236766,
+ "loss": 1.6788,
+ "step": 789
+ },
+ {
+ "epoch": 0.7902222500078147,
+ "grad_norm": 0.724583625793457,
+ "learning_rate": 0.00016762661652252567,
+ "loss": 1.5998,
+ "step": 790
+ },
+ {
+ "epoch": 0.7912225313369385,
+ "grad_norm": 0.7921720743179321,
+ "learning_rate": 0.00016754935958326244,
+ "loss": 1.5956,
+ "step": 791
+ },
+ {
+ "epoch": 0.7922228126660623,
+ "grad_norm": 0.6484968662261963,
+ "learning_rate": 0.00016747202841946928,
+ "loss": 1.5708,
+ "step": 792
+ },
+ {
+ "epoch": 0.7932230939951862,
+ "grad_norm": 0.6372153759002686,
+ "learning_rate": 0.00016739462311611919,
+ "loss": 1.5213,
+ "step": 793
+ },
+ {
+ "epoch": 0.79422337532431,
+ "grad_norm": 0.7025095224380493,
+ "learning_rate": 0.00016731714375826657,
+ "loss": 1.4701,
+ "step": 794
+ },
+ {
+ "epoch": 0.7952236566534338,
+ "grad_norm": 0.681094765663147,
+ "learning_rate": 0.00016723959043104728,
+ "loss": 1.5101,
+ "step": 795
+ },
+ {
+ "epoch": 0.7962239379825576,
+ "grad_norm": 0.7129995822906494,
+ "learning_rate": 0.00016716196321967832,
+ "loss": 1.6038,
+ "step": 796
+ },
+ {
+ "epoch": 0.7972242193116814,
+ "grad_norm": 0.7403759360313416,
+ "learning_rate": 0.00016708426220945802,
+ "loss": 1.5906,
+ "step": 797
+ },
+ {
+ "epoch": 0.7982245006408052,
+ "grad_norm": 0.6562372446060181,
+ "learning_rate": 0.00016700648748576574,
+ "loss": 1.6469,
+ "step": 798
+ },
+ {
+ "epoch": 0.7992247819699291,
+ "grad_norm": 0.839885413646698,
+ "learning_rate": 0.0001669286391340618,
+ "loss": 1.5385,
+ "step": 799
+ },
+ {
+ "epoch": 0.8002250632990529,
+ "grad_norm": 0.8687535524368286,
+ "learning_rate": 0.00016685071723988748,
+ "loss": 1.6759,
+ "step": 800
+ },
+ {
+ "epoch": 0.8012253446281766,
+ "grad_norm": 0.6825409531593323,
+ "learning_rate": 0.00016677272188886483,
+ "loss": 1.841,
+ "step": 801
+ },
+ {
+ "epoch": 0.8022256259573005,
+ "grad_norm": 0.6831037402153015,
+ "learning_rate": 0.00016669465316669667,
+ "loss": 1.5476,
+ "step": 802
+ },
+ {
+ "epoch": 0.8032259072864243,
+ "grad_norm": 0.6906002759933472,
+ "learning_rate": 0.00016661651115916642,
+ "loss": 1.6866,
+ "step": 803
+ },
+ {
+ "epoch": 0.8042261886155482,
+ "grad_norm": 0.7675560116767883,
+ "learning_rate": 0.00016653829595213794,
+ "loss": 1.5663,
+ "step": 804
+ },
+ {
+ "epoch": 0.805226469944672,
+ "grad_norm": 0.6594063639640808,
+ "learning_rate": 0.00016646000763155568,
+ "loss": 1.5247,
+ "step": 805
+ },
+ {
+ "epoch": 0.8062267512737957,
+ "grad_norm": 0.7470384836196899,
+ "learning_rate": 0.00016638164628344425,
+ "loss": 1.6468,
+ "step": 806
+ },
+ {
+ "epoch": 0.8072270326029196,
+ "grad_norm": 0.6874479651451111,
+ "learning_rate": 0.00016630321199390867,
+ "loss": 1.5948,
+ "step": 807
+ },
+ {
+ "epoch": 0.8082273139320434,
+ "grad_norm": 0.7301204204559326,
+ "learning_rate": 0.00016622470484913406,
+ "loss": 1.3922,
+ "step": 808
+ },
+ {
+ "epoch": 0.8092275952611672,
+ "grad_norm": 0.6781039834022522,
+ "learning_rate": 0.00016614612493538551,
+ "loss": 1.6054,
+ "step": 809
+ },
+ {
+ "epoch": 0.810227876590291,
+ "grad_norm": 0.6913226246833801,
+ "learning_rate": 0.00016606747233900815,
+ "loss": 1.5754,
+ "step": 810
+ },
+ {
+ "epoch": 0.8112281579194148,
+ "grad_norm": 0.667425811290741,
+ "learning_rate": 0.00016598874714642697,
+ "loss": 1.8492,
+ "step": 811
+ },
+ {
+ "epoch": 0.8122284392485386,
+ "grad_norm": 0.7662241458892822,
+ "learning_rate": 0.00016590994944414678,
+ "loss": 1.8034,
+ "step": 812
+ },
+ {
+ "epoch": 0.8132287205776625,
+ "grad_norm": 0.7574827075004578,
+ "learning_rate": 0.00016583107931875192,
+ "loss": 1.7435,
+ "step": 813
+ },
+ {
+ "epoch": 0.8142290019067863,
+ "grad_norm": 0.9005519151687622,
+ "learning_rate": 0.0001657521368569064,
+ "loss": 1.6769,
+ "step": 814
+ },
+ {
+ "epoch": 0.8152292832359102,
+ "grad_norm": 0.6895585656166077,
+ "learning_rate": 0.0001656731221453537,
+ "loss": 1.7562,
+ "step": 815
+ },
+ {
+ "epoch": 0.8162295645650339,
+ "grad_norm": 0.7573346495628357,
+ "learning_rate": 0.00016559403527091675,
+ "loss": 1.4748,
+ "step": 816
+ },
+ {
+ "epoch": 0.8172298458941577,
+ "grad_norm": 0.7698647975921631,
+ "learning_rate": 0.0001655148763204977,
+ "loss": 1.6174,
+ "step": 817
+ },
+ {
+ "epoch": 0.8182301272232816,
+ "grad_norm": 0.7975410223007202,
+ "learning_rate": 0.00016543564538107797,
+ "loss": 1.7924,
+ "step": 818
+ },
+ {
+ "epoch": 0.8192304085524054,
+ "grad_norm": 0.9687625169754028,
+ "learning_rate": 0.00016535634253971794,
+ "loss": 1.7725,
+ "step": 819
+ },
+ {
+ "epoch": 0.8202306898815291,
+ "grad_norm": 0.6777274012565613,
+ "learning_rate": 0.00016527696788355714,
+ "loss": 1.5018,
+ "step": 820
+ },
+ {
+ "epoch": 0.821230971210653,
+ "grad_norm": 0.6990464329719543,
+ "learning_rate": 0.00016519752149981397,
+ "loss": 1.5804,
+ "step": 821
+ },
+ {
+ "epoch": 0.8222312525397768,
+ "grad_norm": 0.8445940613746643,
+ "learning_rate": 0.0001651180034757856,
+ "loss": 1.8591,
+ "step": 822
+ },
+ {
+ "epoch": 0.8232315338689006,
+ "grad_norm": 0.8462644815444946,
+ "learning_rate": 0.00016503841389884798,
+ "loss": 1.7582,
+ "step": 823
+ },
+ {
+ "epoch": 0.8242318151980245,
+ "grad_norm": 0.7679311037063599,
+ "learning_rate": 0.00016495875285645566,
+ "loss": 1.5971,
+ "step": 824
+ },
+ {
+ "epoch": 0.8252320965271482,
+ "grad_norm": 0.7734447717666626,
+ "learning_rate": 0.00016487902043614173,
+ "loss": 1.714,
+ "step": 825
+ },
+ {
+ "epoch": 0.8262323778562721,
+ "grad_norm": 0.7890239953994751,
+ "learning_rate": 0.0001647992167255177,
+ "loss": 1.6876,
+ "step": 826
+ },
+ {
+ "epoch": 0.8272326591853959,
+ "grad_norm": 0.8530203104019165,
+ "learning_rate": 0.0001647193418122734,
+ "loss": 1.9096,
+ "step": 827
+ },
+ {
+ "epoch": 0.8282329405145197,
+ "grad_norm": 0.7828260064125061,
+ "learning_rate": 0.00016463939578417692,
+ "loss": 1.5518,
+ "step": 828
+ },
+ {
+ "epoch": 0.8292332218436436,
+ "grad_norm": 0.7015512585639954,
+ "learning_rate": 0.0001645593787290745,
+ "loss": 1.49,
+ "step": 829
+ },
+ {
+ "epoch": 0.8302335031727673,
+ "grad_norm": 0.694771409034729,
+ "learning_rate": 0.0001644792907348904,
+ "loss": 1.5506,
+ "step": 830
+ },
+ {
+ "epoch": 0.8312337845018911,
+ "grad_norm": 0.8167857527732849,
+ "learning_rate": 0.00016439913188962685,
+ "loss": 1.7798,
+ "step": 831
+ },
+ {
+ "epoch": 0.832234065831015,
+ "grad_norm": 0.6682108044624329,
+ "learning_rate": 0.0001643189022813639,
+ "loss": 1.6107,
+ "step": 832
+ },
+ {
+ "epoch": 0.8332343471601388,
+ "grad_norm": 0.8347259163856506,
+ "learning_rate": 0.0001642386019982594,
+ "loss": 1.7672,
+ "step": 833
+ },
+ {
+ "epoch": 0.8342346284892626,
+ "grad_norm": 0.6620945334434509,
+ "learning_rate": 0.00016415823112854883,
+ "loss": 1.6975,
+ "step": 834
+ },
+ {
+ "epoch": 0.8352349098183864,
+ "grad_norm": 0.7286327481269836,
+ "learning_rate": 0.00016407778976054526,
+ "loss": 1.5956,
+ "step": 835
+ },
+ {
+ "epoch": 0.8362351911475102,
+ "grad_norm": 0.6344440579414368,
+ "learning_rate": 0.0001639972779826392,
+ "loss": 1.6455,
+ "step": 836
+ },
+ {
+ "epoch": 0.8372354724766341,
+ "grad_norm": 0.6607793569564819,
+ "learning_rate": 0.0001639166958832985,
+ "loss": 1.6739,
+ "step": 837
+ },
+ {
+ "epoch": 0.8382357538057579,
+ "grad_norm": 0.6973574161529541,
+ "learning_rate": 0.00016383604355106837,
+ "loss": 1.8042,
+ "step": 838
+ },
+ {
+ "epoch": 0.8392360351348817,
+ "grad_norm": 0.7744210958480835,
+ "learning_rate": 0.00016375532107457108,
+ "loss": 1.528,
+ "step": 839
+ },
+ {
+ "epoch": 0.8402363164640055,
+ "grad_norm": 0.6944973468780518,
+ "learning_rate": 0.00016367452854250603,
+ "loss": 1.5498,
+ "step": 840
+ },
+ {
+ "epoch": 0.8412365977931293,
+ "grad_norm": 0.6730696558952332,
+ "learning_rate": 0.00016359366604364972,
+ "loss": 1.5849,
+ "step": 841
+ },
+ {
+ "epoch": 0.8422368791222531,
+ "grad_norm": 0.7051465511322021,
+ "learning_rate": 0.00016351273366685526,
+ "loss": 1.5972,
+ "step": 842
+ },
+ {
+ "epoch": 0.843237160451377,
+ "grad_norm": 0.7309426069259644,
+ "learning_rate": 0.00016343173150105278,
+ "loss": 1.4612,
+ "step": 843
+ },
+ {
+ "epoch": 0.8442374417805008,
+ "grad_norm": 0.7830431461334229,
+ "learning_rate": 0.00016335065963524897,
+ "loss": 1.7208,
+ "step": 844
+ },
+ {
+ "epoch": 0.8452377231096245,
+ "grad_norm": 0.8609834909439087,
+ "learning_rate": 0.0001632695181585272,
+ "loss": 1.8229,
+ "step": 845
+ },
+ {
+ "epoch": 0.8462380044387484,
+ "grad_norm": 0.7489060759544373,
+ "learning_rate": 0.00016318830716004722,
+ "loss": 1.6955,
+ "step": 846
+ },
+ {
+ "epoch": 0.8472382857678722,
+ "grad_norm": 0.636900782585144,
+ "learning_rate": 0.00016310702672904528,
+ "loss": 1.6664,
+ "step": 847
+ },
+ {
+ "epoch": 0.8482385670969961,
+ "grad_norm": 0.6423529386520386,
+ "learning_rate": 0.00016302567695483382,
+ "loss": 1.5356,
+ "step": 848
+ },
+ {
+ "epoch": 0.8492388484261199,
+ "grad_norm": 0.7380033731460571,
+ "learning_rate": 0.0001629442579268016,
+ "loss": 1.4482,
+ "step": 849
+ },
+ {
+ "epoch": 0.8502391297552436,
+ "grad_norm": 0.8258544206619263,
+ "learning_rate": 0.00016286276973441333,
+ "loss": 1.7058,
+ "step": 850
+ },
+ {
+ "epoch": 0.8512394110843675,
+ "grad_norm": 0.6473391056060791,
+ "learning_rate": 0.00016278121246720987,
+ "loss": 1.5374,
+ "step": 851
+ },
+ {
+ "epoch": 0.8522396924134913,
+ "grad_norm": 0.7097072005271912,
+ "learning_rate": 0.00016269958621480788,
+ "loss": 1.6786,
+ "step": 852
+ },
+ {
+ "epoch": 0.8532399737426151,
+ "grad_norm": 0.724993884563446,
+ "learning_rate": 0.0001626178910668998,
+ "loss": 1.6022,
+ "step": 853
+ },
+ {
+ "epoch": 0.854240255071739,
+ "grad_norm": 0.6800474524497986,
+ "learning_rate": 0.00016253612711325386,
+ "loss": 1.6382,
+ "step": 854
+ },
+ {
+ "epoch": 0.8552405364008627,
+ "grad_norm": 0.6339759826660156,
+ "learning_rate": 0.0001624542944437139,
+ "loss": 1.5641,
+ "step": 855
+ },
+ {
+ "epoch": 0.8562408177299866,
+ "grad_norm": 0.6792349219322205,
+ "learning_rate": 0.00016237239314819917,
+ "loss": 1.3713,
+ "step": 856
+ },
+ {
+ "epoch": 0.8572410990591104,
+ "grad_norm": 0.6544696688652039,
+ "learning_rate": 0.0001622904233167044,
+ "loss": 1.5639,
+ "step": 857
+ },
+ {
+ "epoch": 0.8582413803882342,
+ "grad_norm": 0.7736073732376099,
+ "learning_rate": 0.0001622083850392996,
+ "loss": 1.5454,
+ "step": 858
+ },
+ {
+ "epoch": 0.859241661717358,
+ "grad_norm": 0.8642422556877136,
+ "learning_rate": 0.00016212627840613003,
+ "loss": 1.6852,
+ "step": 859
+ },
+ {
+ "epoch": 0.8602419430464818,
+ "grad_norm": 0.6520773768424988,
+ "learning_rate": 0.000162044103507416,
+ "loss": 1.5335,
+ "step": 860
+ },
+ {
+ "epoch": 0.8612422243756056,
+ "grad_norm": 0.7647336721420288,
+ "learning_rate": 0.00016196186043345288,
+ "loss": 1.5578,
+ "step": 861
+ },
+ {
+ "epoch": 0.8622425057047295,
+ "grad_norm": 0.9621163010597229,
+ "learning_rate": 0.00016187954927461093,
+ "loss": 1.6976,
+ "step": 862
+ },
+ {
+ "epoch": 0.8632427870338533,
+ "grad_norm": 0.6847056746482849,
+ "learning_rate": 0.00016179717012133521,
+ "loss": 1.7118,
+ "step": 863
+ },
+ {
+ "epoch": 0.864243068362977,
+ "grad_norm": 0.7482467889785767,
+ "learning_rate": 0.00016171472306414554,
+ "loss": 1.6601,
+ "step": 864
+ },
+ {
+ "epoch": 0.8652433496921009,
+ "grad_norm": 0.7760444283485413,
+ "learning_rate": 0.00016163220819363628,
+ "loss": 1.5587,
+ "step": 865
+ },
+ {
+ "epoch": 0.8662436310212247,
+ "grad_norm": 0.8380980491638184,
+ "learning_rate": 0.00016154962560047643,
+ "loss": 1.7171,
+ "step": 866
+ },
+ {
+ "epoch": 0.8672439123503486,
+ "grad_norm": 0.6927618384361267,
+ "learning_rate": 0.00016146697537540924,
+ "loss": 1.7244,
+ "step": 867
+ },
+ {
+ "epoch": 0.8682441936794724,
+ "grad_norm": 0.7855746746063232,
+ "learning_rate": 0.0001613842576092524,
+ "loss": 1.5848,
+ "step": 868
+ },
+ {
+ "epoch": 0.8692444750085961,
+ "grad_norm": 0.6743006110191345,
+ "learning_rate": 0.00016130147239289778,
+ "loss": 1.6969,
+ "step": 869
+ },
+ {
+ "epoch": 0.87024475633772,
+ "grad_norm": 0.7060980200767517,
+ "learning_rate": 0.00016121861981731135,
+ "loss": 1.5632,
+ "step": 870
+ },
+ {
+ "epoch": 0.8712450376668438,
+ "grad_norm": 0.7673144340515137,
+ "learning_rate": 0.00016113569997353312,
+ "loss": 1.5687,
+ "step": 871
+ },
+ {
+ "epoch": 0.8722453189959676,
+ "grad_norm": 0.8105847239494324,
+ "learning_rate": 0.000161052712952677,
+ "loss": 1.6074,
+ "step": 872
+ },
+ {
+ "epoch": 0.8732456003250915,
+ "grad_norm": 0.6536850333213806,
+ "learning_rate": 0.0001609696588459307,
+ "loss": 1.5842,
+ "step": 873
+ },
+ {
+ "epoch": 0.8742458816542152,
+ "grad_norm": 0.6653574705123901,
+ "learning_rate": 0.00016088653774455568,
+ "loss": 1.4652,
+ "step": 874
+ },
+ {
+ "epoch": 0.875246162983339,
+ "grad_norm": 0.7202721238136292,
+ "learning_rate": 0.00016080334973988695,
+ "loss": 1.5212,
+ "step": 875
+ },
+ {
+ "epoch": 0.8762464443124629,
+ "grad_norm": 0.8218807578086853,
+ "learning_rate": 0.00016072009492333318,
+ "loss": 1.803,
+ "step": 876
+ },
+ {
+ "epoch": 0.8772467256415867,
+ "grad_norm": 0.6170400381088257,
+ "learning_rate": 0.0001606367733863763,
+ "loss": 1.5313,
+ "step": 877
+ },
+ {
+ "epoch": 0.8782470069707106,
+ "grad_norm": 0.6750448346138,
+ "learning_rate": 0.00016055338522057158,
+ "loss": 1.6183,
+ "step": 878
+ },
+ {
+ "epoch": 0.8792472882998343,
+ "grad_norm": 0.6602128148078918,
+ "learning_rate": 0.00016046993051754756,
+ "loss": 1.6669,
+ "step": 879
+ },
+ {
+ "epoch": 0.8802475696289581,
+ "grad_norm": 0.7064031958580017,
+ "learning_rate": 0.00016038640936900586,
+ "loss": 1.7458,
+ "step": 880
+ },
+ {
+ "epoch": 0.881247850958082,
+ "grad_norm": 0.5916783809661865,
+ "learning_rate": 0.00016030282186672116,
+ "loss": 1.4966,
+ "step": 881
+ },
+ {
+ "epoch": 0.8822481322872058,
+ "grad_norm": 0.7189202904701233,
+ "learning_rate": 0.00016021916810254097,
+ "loss": 1.5812,
+ "step": 882
+ },
+ {
+ "epoch": 0.8832484136163296,
+ "grad_norm": 0.7760966420173645,
+ "learning_rate": 0.00016013544816838565,
+ "loss": 1.6709,
+ "step": 883
+ },
+ {
+ "epoch": 0.8842486949454534,
+ "grad_norm": 0.6894650459289551,
+ "learning_rate": 0.00016005166215624827,
+ "loss": 1.6255,
+ "step": 884
+ },
+ {
+ "epoch": 0.8852489762745772,
+ "grad_norm": 0.6777058839797974,
+ "learning_rate": 0.0001599678101581945,
+ "loss": 1.7479,
+ "step": 885
+ },
+ {
+ "epoch": 0.886249257603701,
+ "grad_norm": 0.7056024670600891,
+ "learning_rate": 0.00015988389226636253,
+ "loss": 1.7896,
+ "step": 886
+ },
+ {
+ "epoch": 0.8872495389328249,
+ "grad_norm": 0.6465604305267334,
+ "learning_rate": 0.00015979990857296295,
+ "loss": 1.7363,
+ "step": 887
+ },
+ {
+ "epoch": 0.8882498202619487,
+ "grad_norm": 0.6703017950057983,
+ "learning_rate": 0.00015971585917027862,
+ "loss": 1.6617,
+ "step": 888
+ },
+ {
+ "epoch": 0.8892501015910725,
+ "grad_norm": 0.7116142511367798,
+ "learning_rate": 0.00015963174415066468,
+ "loss": 1.8232,
+ "step": 889
+ },
+ {
+ "epoch": 0.8902503829201963,
+ "grad_norm": 0.7552229762077332,
+ "learning_rate": 0.0001595475636065483,
+ "loss": 1.7847,
+ "step": 890
+ },
+ {
+ "epoch": 0.8912506642493201,
+ "grad_norm": 0.70728999376297,
+ "learning_rate": 0.00015946331763042867,
+ "loss": 1.5665,
+ "step": 891
+ },
+ {
+ "epoch": 0.892250945578444,
+ "grad_norm": 0.6701356768608093,
+ "learning_rate": 0.00015937900631487686,
+ "loss": 1.3572,
+ "step": 892
+ },
+ {
+ "epoch": 0.8932512269075678,
+ "grad_norm": 0.6960388422012329,
+ "learning_rate": 0.00015929462975253585,
+ "loss": 1.5815,
+ "step": 893
+ },
+ {
+ "epoch": 0.8942515082366915,
+ "grad_norm": 0.6505674719810486,
+ "learning_rate": 0.00015921018803612014,
+ "loss": 1.7499,
+ "step": 894
+ },
+ {
+ "epoch": 0.8952517895658154,
+ "grad_norm": 0.604205310344696,
+ "learning_rate": 0.0001591256812584159,
+ "loss": 1.6838,
+ "step": 895
+ },
+ {
+ "epoch": 0.8962520708949392,
+ "grad_norm": 0.5875198841094971,
+ "learning_rate": 0.00015904110951228082,
+ "loss": 1.5147,
+ "step": 896
+ },
+ {
+ "epoch": 0.897252352224063,
+ "grad_norm": 0.6970433592796326,
+ "learning_rate": 0.00015895647289064396,
+ "loss": 1.7767,
+ "step": 897
+ },
+ {
+ "epoch": 0.8982526335531869,
+ "grad_norm": 0.7364515066146851,
+ "learning_rate": 0.00015887177148650564,
+ "loss": 1.6672,
+ "step": 898
+ },
+ {
+ "epoch": 0.8992529148823106,
+ "grad_norm": 0.7843589186668396,
+ "learning_rate": 0.0001587870053929374,
+ "loss": 1.689,
+ "step": 899
+ },
+ {
+ "epoch": 0.9002531962114345,
+ "grad_norm": 0.6405196189880371,
+ "learning_rate": 0.00015870217470308188,
+ "loss": 1.5917,
+ "step": 900
+ },
+ {
+ "epoch": 0.9012534775405583,
+ "grad_norm": 0.7019757628440857,
+ "learning_rate": 0.0001586172795101526,
+ "loss": 1.5497,
+ "step": 901
+ },
+ {
+ "epoch": 0.9022537588696821,
+ "grad_norm": 0.8048270344734192,
+ "learning_rate": 0.00015853231990743406,
+ "loss": 1.5821,
+ "step": 902
+ },
+ {
+ "epoch": 0.903254040198806,
+ "grad_norm": 0.6245777606964111,
+ "learning_rate": 0.0001584472959882815,
+ "loss": 1.5688,
+ "step": 903
+ },
+ {
+ "epoch": 0.9042543215279297,
+ "grad_norm": 0.6584132313728333,
+ "learning_rate": 0.00015836220784612085,
+ "loss": 1.4555,
+ "step": 904
+ },
+ {
+ "epoch": 0.9052546028570535,
+ "grad_norm": 0.7710773944854736,
+ "learning_rate": 0.00015827705557444852,
+ "loss": 1.6416,
+ "step": 905
+ },
+ {
+ "epoch": 0.9062548841861774,
+ "grad_norm": 0.6738126277923584,
+ "learning_rate": 0.00015819183926683153,
+ "loss": 1.6272,
+ "step": 906
+ },
+ {
+ "epoch": 0.9072551655153012,
+ "grad_norm": 0.6698735356330872,
+ "learning_rate": 0.00015810655901690715,
+ "loss": 1.4778,
+ "step": 907
+ },
+ {
+ "epoch": 0.9082554468444249,
+ "grad_norm": 1.0088928937911987,
+ "learning_rate": 0.00015802121491838297,
+ "loss": 1.6854,
+ "step": 908
+ },
+ {
+ "epoch": 0.9092557281735488,
+ "grad_norm": 0.6948708891868591,
+ "learning_rate": 0.0001579358070650367,
+ "loss": 1.5673,
+ "step": 909
+ },
+ {
+ "epoch": 0.9102560095026726,
+ "grad_norm": 0.6728948950767517,
+ "learning_rate": 0.00015785033555071616,
+ "loss": 1.6646,
+ "step": 910
+ },
+ {
+ "epoch": 0.9112562908317965,
+ "grad_norm": 0.8096952438354492,
+ "learning_rate": 0.00015776480046933905,
+ "loss": 1.4675,
+ "step": 911
+ },
+ {
+ "epoch": 0.9122565721609203,
+ "grad_norm": 0.6625403761863708,
+ "learning_rate": 0.000157679201914893,
+ "loss": 1.4793,
+ "step": 912
+ },
+ {
+ "epoch": 0.913256853490044,
+ "grad_norm": 0.7129424810409546,
+ "learning_rate": 0.00015759353998143528,
+ "loss": 1.574,
+ "step": 913
+ },
+ {
+ "epoch": 0.9142571348191679,
+ "grad_norm": 0.6151349544525146,
+ "learning_rate": 0.00015750781476309288,
+ "loss": 1.5631,
+ "step": 914
+ },
+ {
+ "epoch": 0.9152574161482917,
+ "grad_norm": 0.7185074687004089,
+ "learning_rate": 0.00015742202635406235,
+ "loss": 1.8382,
+ "step": 915
+ },
+ {
+ "epoch": 0.9162576974774155,
+ "grad_norm": 0.7076066732406616,
+ "learning_rate": 0.00015733617484860963,
+ "loss": 1.5394,
+ "step": 916
+ },
+ {
+ "epoch": 0.9172579788065394,
+ "grad_norm": 0.7286276817321777,
+ "learning_rate": 0.00015725026034106996,
+ "loss": 1.8139,
+ "step": 917
+ },
+ {
+ "epoch": 0.9182582601356631,
+ "grad_norm": 0.757075846195221,
+ "learning_rate": 0.00015716428292584787,
+ "loss": 1.6768,
+ "step": 918
+ },
+ {
+ "epoch": 0.919258541464787,
+ "grad_norm": 0.6926739811897278,
+ "learning_rate": 0.00015707824269741702,
+ "loss": 1.4541,
+ "step": 919
+ },
+ {
+ "epoch": 0.9202588227939108,
+ "grad_norm": 0.6489847898483276,
+ "learning_rate": 0.00015699213975031996,
+ "loss": 1.4725,
+ "step": 920
+ },
+ {
+ "epoch": 0.9212591041230346,
+ "grad_norm": 0.7668707966804504,
+ "learning_rate": 0.0001569059741791684,
+ "loss": 1.4239,
+ "step": 921
+ },
+ {
+ "epoch": 0.9222593854521585,
+ "grad_norm": 0.736863911151886,
+ "learning_rate": 0.0001568197460786426,
+ "loss": 1.6117,
+ "step": 922
+ },
+ {
+ "epoch": 0.9232596667812822,
+ "grad_norm": 0.8462884426116943,
+ "learning_rate": 0.0001567334555434917,
+ "loss": 1.5025,
+ "step": 923
+ },
+ {
+ "epoch": 0.924259948110406,
+ "grad_norm": 0.7481950521469116,
+ "learning_rate": 0.0001566471026685334,
+ "loss": 1.5024,
+ "step": 924
+ },
+ {
+ "epoch": 0.9252602294395299,
+ "grad_norm": 0.6457516551017761,
+ "learning_rate": 0.00015656068754865387,
+ "loss": 1.4526,
+ "step": 925
+ },
+ {
+ "epoch": 0.9262605107686537,
+ "grad_norm": 0.809140682220459,
+ "learning_rate": 0.00015647421027880772,
+ "loss": 1.4449,
+ "step": 926
+ },
+ {
+ "epoch": 0.9272607920977775,
+ "grad_norm": 0.6967790126800537,
+ "learning_rate": 0.0001563876709540178,
+ "loss": 1.5552,
+ "step": 927
+ },
+ {
+ "epoch": 0.9282610734269013,
+ "grad_norm": 0.6858595609664917,
+ "learning_rate": 0.0001563010696693752,
+ "loss": 1.6202,
+ "step": 928
+ },
+ {
+ "epoch": 0.9292613547560251,
+ "grad_norm": 0.7033559679985046,
+ "learning_rate": 0.00015621440652003907,
+ "loss": 1.7186,
+ "step": 929
+ },
+ {
+ "epoch": 0.930261636085149,
+ "grad_norm": 0.6527283787727356,
+ "learning_rate": 0.00015612768160123652,
+ "loss": 1.5028,
+ "step": 930
+ },
+ {
+ "epoch": 0.9312619174142728,
+ "grad_norm": 0.7243602275848389,
+ "learning_rate": 0.00015604089500826257,
+ "loss": 1.6729,
+ "step": 931
+ },
+ {
+ "epoch": 0.9322621987433966,
+ "grad_norm": 0.6734297275543213,
+ "learning_rate": 0.00015595404683648,
+ "loss": 1.4731,
+ "step": 932
+ },
+ {
+ "epoch": 0.9332624800725204,
+ "grad_norm": 0.7641247510910034,
+ "learning_rate": 0.00015586713718131922,
+ "loss": 1.5851,
+ "step": 933
+ },
+ {
+ "epoch": 0.9342627614016442,
+ "grad_norm": 0.7062788009643555,
+ "learning_rate": 0.0001557801661382782,
+ "loss": 1.5735,
+ "step": 934
+ },
+ {
+ "epoch": 0.935263042730768,
+ "grad_norm": 0.6413556337356567,
+ "learning_rate": 0.00015569313380292248,
+ "loss": 1.5854,
+ "step": 935
+ },
+ {
+ "epoch": 0.9362633240598919,
+ "grad_norm": 0.645720362663269,
+ "learning_rate": 0.00015560604027088477,
+ "loss": 1.5072,
+ "step": 936
+ },
+ {
+ "epoch": 0.9372636053890157,
+ "grad_norm": 0.6726225018501282,
+ "learning_rate": 0.00015551888563786515,
+ "loss": 1.587,
+ "step": 937
+ },
+ {
+ "epoch": 0.9382638867181394,
+ "grad_norm": 0.7043680548667908,
+ "learning_rate": 0.00015543166999963076,
+ "loss": 1.6577,
+ "step": 938
+ },
+ {
+ "epoch": 0.9392641680472633,
+ "grad_norm": 0.7049617767333984,
+ "learning_rate": 0.0001553443934520159,
+ "loss": 1.7624,
+ "step": 939
+ },
+ {
+ "epoch": 0.9402644493763871,
+ "grad_norm": 0.7060776352882385,
+ "learning_rate": 0.00015525705609092157,
+ "loss": 1.6208,
+ "step": 940
+ },
+ {
+ "epoch": 0.941264730705511,
+ "grad_norm": 0.6215025186538696,
+ "learning_rate": 0.00015516965801231586,
+ "loss": 1.4645,
+ "step": 941
+ },
+ {
+ "epoch": 0.9422650120346348,
+ "grad_norm": 0.7021099328994751,
+ "learning_rate": 0.0001550821993122334,
+ "loss": 1.566,
+ "step": 942
+ },
+ {
+ "epoch": 0.9432652933637585,
+ "grad_norm": 0.6451042294502258,
+ "learning_rate": 0.0001549946800867755,
+ "loss": 1.7491,
+ "step": 943
+ },
+ {
+ "epoch": 0.9442655746928824,
+ "grad_norm": 0.7288572192192078,
+ "learning_rate": 0.00015490710043210997,
+ "loss": 1.6302,
+ "step": 944
+ },
+ {
+ "epoch": 0.9452658560220062,
+ "grad_norm": 0.7850833535194397,
+ "learning_rate": 0.00015481946044447099,
+ "loss": 1.5673,
+ "step": 945
+ },
+ {
+ "epoch": 0.94626613735113,
+ "grad_norm": 0.7459181547164917,
+ "learning_rate": 0.00015473176022015906,
+ "loss": 1.4529,
+ "step": 946
+ },
+ {
+ "epoch": 0.9472664186802539,
+ "grad_norm": 0.7002627849578857,
+ "learning_rate": 0.0001546439998555409,
+ "loss": 1.8814,
+ "step": 947
+ },
+ {
+ "epoch": 0.9482667000093776,
+ "grad_norm": 0.6664572358131409,
+ "learning_rate": 0.0001545561794470492,
+ "loss": 1.5337,
+ "step": 948
+ },
+ {
+ "epoch": 0.9492669813385014,
+ "grad_norm": 0.757116973400116,
+ "learning_rate": 0.00015446829909118275,
+ "loss": 1.5775,
+ "step": 949
+ },
+ {
+ "epoch": 0.9502672626676253,
+ "grad_norm": 0.7456643581390381,
+ "learning_rate": 0.00015438035888450623,
+ "loss": 1.525,
+ "step": 950
+ },
+ {
+ "epoch": 0.9512675439967491,
+ "grad_norm": 0.6722500920295715,
+ "learning_rate": 0.00015429235892364994,
+ "loss": 1.5059,
+ "step": 951
+ },
+ {
+ "epoch": 0.952267825325873,
+ "grad_norm": 0.7431210279464722,
+ "learning_rate": 0.00015420429930530996,
+ "loss": 1.6867,
+ "step": 952
+ },
+ {
+ "epoch": 0.9532681066549967,
+ "grad_norm": 0.751015305519104,
+ "learning_rate": 0.00015411618012624786,
+ "loss": 1.7371,
+ "step": 953
+ },
+ {
+ "epoch": 0.9542683879841205,
+ "grad_norm": 0.807579517364502,
+ "learning_rate": 0.00015402800148329071,
+ "loss": 1.7353,
+ "step": 954
+ },
+ {
+ "epoch": 0.9552686693132444,
+ "grad_norm": 0.608161449432373,
+ "learning_rate": 0.00015393976347333088,
+ "loss": 1.3074,
+ "step": 955
+ },
+ {
+ "epoch": 0.9562689506423682,
+ "grad_norm": 0.7092815637588501,
+ "learning_rate": 0.00015385146619332596,
+ "loss": 1.676,
+ "step": 956
+ },
+ {
+ "epoch": 0.9572692319714919,
+ "grad_norm": 0.7639429569244385,
+ "learning_rate": 0.00015376310974029873,
+ "loss": 1.6452,
+ "step": 957
+ },
+ {
+ "epoch": 0.9582695133006158,
+ "grad_norm": 0.7333659529685974,
+ "learning_rate": 0.00015367469421133695,
+ "loss": 1.6821,
+ "step": 958
+ },
+ {
+ "epoch": 0.9592697946297396,
+ "grad_norm": 0.7246838212013245,
+ "learning_rate": 0.00015358621970359325,
+ "loss": 1.5078,
+ "step": 959
+ },
+ {
+ "epoch": 0.9602700759588634,
+ "grad_norm": 0.7209622859954834,
+ "learning_rate": 0.00015349768631428519,
+ "loss": 1.5617,
+ "step": 960
+ },
+ {
+ "epoch": 0.9612703572879873,
+ "grad_norm": 0.7034916877746582,
+ "learning_rate": 0.00015340909414069488,
+ "loss": 1.4711,
+ "step": 961
+ },
+ {
+ "epoch": 0.962270638617111,
+ "grad_norm": 0.7311360239982605,
+ "learning_rate": 0.00015332044328016914,
+ "loss": 1.6488,
+ "step": 962
+ },
+ {
+ "epoch": 0.9632709199462349,
+ "grad_norm": 0.6668992638587952,
+ "learning_rate": 0.0001532317338301192,
+ "loss": 1.6804,
+ "step": 963
+ },
+ {
+ "epoch": 0.9642712012753587,
+ "grad_norm": 0.6265329122543335,
+ "learning_rate": 0.00015314296588802076,
+ "loss": 1.8169,
+ "step": 964
+ },
+ {
+ "epoch": 0.9652714826044825,
+ "grad_norm": 0.6945448517799377,
+ "learning_rate": 0.00015305413955141365,
+ "loss": 1.8041,
+ "step": 965
+ },
+ {
+ "epoch": 0.9662717639336064,
+ "grad_norm": 0.6718643307685852,
+ "learning_rate": 0.00015296525491790205,
+ "loss": 1.3486,
+ "step": 966
+ },
+ {
+ "epoch": 0.9672720452627301,
+ "grad_norm": 0.6232700943946838,
+ "learning_rate": 0.00015287631208515406,
+ "loss": 1.5672,
+ "step": 967
+ },
+ {
+ "epoch": 0.9682723265918539,
+ "grad_norm": 0.7481172680854797,
+ "learning_rate": 0.00015278731115090171,
+ "loss": 1.5992,
+ "step": 968
+ },
+ {
+ "epoch": 0.9692726079209778,
+ "grad_norm": 0.6585466861724854,
+ "learning_rate": 0.00015269825221294098,
+ "loss": 1.6403,
+ "step": 969
+ },
+ {
+ "epoch": 0.9702728892501016,
+ "grad_norm": 0.7587956786155701,
+ "learning_rate": 0.00015260913536913154,
+ "loss": 1.7991,
+ "step": 970
+ },
+ {
+ "epoch": 0.9712731705792254,
+ "grad_norm": 0.672698974609375,
+ "learning_rate": 0.00015251996071739664,
+ "loss": 1.4311,
+ "step": 971
+ },
+ {
+ "epoch": 0.9722734519083492,
+ "grad_norm": 0.7597199082374573,
+ "learning_rate": 0.00015243072835572318,
+ "loss": 1.5692,
+ "step": 972
+ },
+ {
+ "epoch": 0.973273733237473,
+ "grad_norm": 0.7342745661735535,
+ "learning_rate": 0.0001523414383821613,
+ "loss": 1.6364,
+ "step": 973
+ },
+ {
+ "epoch": 0.9742740145665969,
+ "grad_norm": 0.6640815138816833,
+ "learning_rate": 0.00015225209089482462,
+ "loss": 1.5113,
+ "step": 974
+ },
+ {
+ "epoch": 0.9752742958957207,
+ "grad_norm": 0.6298378109931946,
+ "learning_rate": 0.0001521626859918898,
+ "loss": 1.4822,
+ "step": 975
+ },
+ {
+ "epoch": 0.9762745772248445,
+ "grad_norm": 0.6862055659294128,
+ "learning_rate": 0.00015207322377159668,
+ "loss": 1.6159,
+ "step": 976
+ },
+ {
+ "epoch": 0.9772748585539683,
+ "grad_norm": 0.6377236843109131,
+ "learning_rate": 0.00015198370433224805,
+ "loss": 1.6046,
+ "step": 977
+ },
+ {
+ "epoch": 0.9782751398830921,
+ "grad_norm": 0.620070219039917,
+ "learning_rate": 0.00015189412777220958,
+ "loss": 1.589,
+ "step": 978
+ },
+ {
+ "epoch": 0.9792754212122159,
+ "grad_norm": 0.7776119112968445,
+ "learning_rate": 0.00015180449418990976,
+ "loss": 1.485,
+ "step": 979
+ },
+ {
+ "epoch": 0.9802757025413398,
+ "grad_norm": 0.8258413076400757,
+ "learning_rate": 0.00015171480368383964,
+ "loss": 1.5615,
+ "step": 980
+ },
+ {
+ "epoch": 0.9812759838704636,
+ "grad_norm": 0.7297958135604858,
+ "learning_rate": 0.00015162505635255287,
+ "loss": 1.5408,
+ "step": 981
+ },
+ {
+ "epoch": 0.9822762651995874,
+ "grad_norm": 0.5848103165626526,
+ "learning_rate": 0.00015153525229466555,
+ "loss": 1.6821,
+ "step": 982
+ },
+ {
+ "epoch": 0.9832765465287112,
+ "grad_norm": 0.7375655174255371,
+ "learning_rate": 0.00015144539160885613,
+ "loss": 1.7568,
+ "step": 983
+ },
+ {
+ "epoch": 0.984276827857835,
+ "grad_norm": 0.7466885447502136,
+ "learning_rate": 0.00015135547439386516,
+ "loss": 1.5805,
+ "step": 984
+ },
+ {
+ "epoch": 0.9852771091869589,
+ "grad_norm": 0.6645593047142029,
+ "learning_rate": 0.0001512655007484955,
+ "loss": 1.6776,
+ "step": 985
+ },
+ {
+ "epoch": 0.9862773905160827,
+ "grad_norm": 0.7973874807357788,
+ "learning_rate": 0.00015117547077161185,
+ "loss": 1.4931,
+ "step": 986
+ },
+ {
+ "epoch": 0.9872776718452064,
+ "grad_norm": 0.685391902923584,
+ "learning_rate": 0.0001510853845621409,
+ "loss": 1.6254,
+ "step": 987
+ },
+ {
+ "epoch": 0.9882779531743303,
+ "grad_norm": 0.6562414765357971,
+ "learning_rate": 0.00015099524221907107,
+ "loss": 1.6677,
+ "step": 988
+ },
+ {
+ "epoch": 0.9892782345034541,
+ "grad_norm": 0.6216359734535217,
+ "learning_rate": 0.0001509050438414525,
+ "loss": 1.6107,
+ "step": 989
+ },
+ {
+ "epoch": 0.9902785158325779,
+ "grad_norm": 0.7108810544013977,
+ "learning_rate": 0.00015081478952839693,
+ "loss": 1.5268,
+ "step": 990
+ },
+ {
+ "epoch": 0.9912787971617018,
+ "grad_norm": 0.7076026797294617,
+ "learning_rate": 0.00015072447937907753,
+ "loss": 1.3716,
+ "step": 991
+ },
+ {
+ "epoch": 0.9922790784908255,
+ "grad_norm": 0.6056272983551025,
+ "learning_rate": 0.00015063411349272877,
+ "loss": 1.4931,
+ "step": 992
+ },
+ {
+ "epoch": 0.9932793598199494,
+ "grad_norm": 0.726671576499939,
+ "learning_rate": 0.00015054369196864644,
+ "loss": 1.6409,
+ "step": 993
+ },
+ {
+ "epoch": 0.9942796411490732,
+ "grad_norm": 0.7019214630126953,
+ "learning_rate": 0.00015045321490618748,
+ "loss": 1.4476,
+ "step": 994
+ },
+ {
+ "epoch": 0.995279922478197,
+ "grad_norm": 0.755043625831604,
+ "learning_rate": 0.00015036268240476978,
+ "loss": 1.6674,
+ "step": 995
+ },
+ {
+ "epoch": 0.9962802038073209,
+ "grad_norm": 0.7450313568115234,
+ "learning_rate": 0.00015027209456387218,
+ "loss": 1.3706,
+ "step": 996
+ },
+ {
+ "epoch": 0.9972804851364446,
+ "grad_norm": 0.6804680228233337,
+ "learning_rate": 0.00015018145148303438,
+ "loss": 1.3878,
+ "step": 997
+ },
+ {
+ "epoch": 0.9982807664655684,
+ "grad_norm": 0.7353954315185547,
+ "learning_rate": 0.00015009075326185667,
+ "loss": 1.8656,
+ "step": 998
+ },
+ {
+ "epoch": 0.9992810477946923,
+ "grad_norm": 0.7213340401649475,
+ "learning_rate": 0.00015000000000000001,
+ "loss": 1.6031,
+ "step": 999
+ },
+ {
+ "epoch": 1.0002813291238162,
+ "grad_norm": 0.7066403031349182,
+ "learning_rate": 0.00014990919179718584,
+ "loss": 1.3663,
+ "step": 1000
+ },
+ {
+ "epoch": 1.0012816104529398,
+ "grad_norm": 0.6104635000228882,
+ "learning_rate": 0.00014981832875319597,
+ "loss": 1.3155,
+ "step": 1001
+ },
+ {
+ "epoch": 1.0022818917820637,
+ "grad_norm": 0.7524546384811401,
+ "learning_rate": 0.00014972741096787242,
+ "loss": 1.2042,
+ "step": 1002
+ },
+ {
+ "epoch": 1.0032821731111876,
+ "grad_norm": 0.6831395626068115,
+ "learning_rate": 0.0001496364385411174,
+ "loss": 1.3909,
+ "step": 1003
+ },
+ {
+ "epoch": 1.0042824544403113,
+ "grad_norm": 0.6223152875900269,
+ "learning_rate": 0.0001495454115728932,
+ "loss": 1.2693,
+ "step": 1004
+ },
+ {
+ "epoch": 1.0052827357694352,
+ "grad_norm": 0.6630414128303528,
+ "learning_rate": 0.0001494543301632219,
+ "loss": 1.5871,
+ "step": 1005
+ },
+ {
+ "epoch": 1.006283017098559,
+ "grad_norm": 0.6211387515068054,
+ "learning_rate": 0.00014936319441218555,
+ "loss": 1.5096,
+ "step": 1006
+ },
+ {
+ "epoch": 1.0072832984276827,
+ "grad_norm": 0.7009375095367432,
+ "learning_rate": 0.0001492720044199259,
+ "loss": 1.4553,
+ "step": 1007
+ },
+ {
+ "epoch": 1.0082835797568066,
+ "grad_norm": 0.607667088508606,
+ "learning_rate": 0.0001491807602866442,
+ "loss": 1.4655,
+ "step": 1008
+ },
+ {
+ "epoch": 1.0092838610859305,
+ "grad_norm": 0.7168284058570862,
+ "learning_rate": 0.00014908946211260123,
+ "loss": 1.32,
+ "step": 1009
+ },
+ {
+ "epoch": 1.0102841424150542,
+ "grad_norm": 0.6472702622413635,
+ "learning_rate": 0.00014899810999811726,
+ "loss": 1.418,
+ "step": 1010
+ },
+ {
+ "epoch": 1.011284423744178,
+ "grad_norm": 0.6901958584785461,
+ "learning_rate": 0.0001489067040435717,
+ "loss": 1.5842,
+ "step": 1011
+ },
+ {
+ "epoch": 1.012284705073302,
+ "grad_norm": 0.6948314905166626,
+ "learning_rate": 0.00014881524434940313,
+ "loss": 1.3352,
+ "step": 1012
+ },
+ {
+ "epoch": 1.0132849864024256,
+ "grad_norm": 0.6064580082893372,
+ "learning_rate": 0.0001487237310161093,
+ "loss": 1.2467,
+ "step": 1013
+ },
+ {
+ "epoch": 1.0142852677315495,
+ "grad_norm": 0.5783251523971558,
+ "learning_rate": 0.0001486321641442467,
+ "loss": 1.3932,
+ "step": 1014
+ },
+ {
+ "epoch": 1.0152855490606734,
+ "grad_norm": 0.6915367245674133,
+ "learning_rate": 0.00014854054383443081,
+ "loss": 1.5062,
+ "step": 1015
+ },
+ {
+ "epoch": 1.016285830389797,
+ "grad_norm": 0.7143461108207703,
+ "learning_rate": 0.00014844887018733582,
+ "loss": 1.3284,
+ "step": 1016
+ },
+ {
+ "epoch": 1.017286111718921,
+ "grad_norm": 0.7030971050262451,
+ "learning_rate": 0.00014835714330369446,
+ "loss": 1.5919,
+ "step": 1017
+ },
+ {
+ "epoch": 1.0182863930480448,
+ "grad_norm": 0.7102513909339905,
+ "learning_rate": 0.00014826536328429795,
+ "loss": 1.4448,
+ "step": 1018
+ },
+ {
+ "epoch": 1.0192866743771685,
+ "grad_norm": 0.6152640581130981,
+ "learning_rate": 0.000148173530229996,
+ "loss": 1.4771,
+ "step": 1019
+ },
+ {
+ "epoch": 1.0202869557062924,
+ "grad_norm": 0.6302015781402588,
+ "learning_rate": 0.00014808164424169647,
+ "loss": 1.3969,
+ "step": 1020
+ },
+ {
+ "epoch": 1.0212872370354162,
+ "grad_norm": 0.8721572756767273,
+ "learning_rate": 0.0001479897054203655,
+ "loss": 1.3515,
+ "step": 1021
+ },
+ {
+ "epoch": 1.0222875183645401,
+ "grad_norm": 1.096592903137207,
+ "learning_rate": 0.00014789771386702717,
+ "loss": 1.4757,
+ "step": 1022
+ },
+ {
+ "epoch": 1.0232877996936638,
+ "grad_norm": 0.7684335112571716,
+ "learning_rate": 0.0001478056696827636,
+ "loss": 1.2521,
+ "step": 1023
+ },
+ {
+ "epoch": 1.0242880810227877,
+ "grad_norm": 0.6189197301864624,
+ "learning_rate": 0.0001477135729687147,
+ "loss": 1.4304,
+ "step": 1024
+ },
+ {
+ "epoch": 1.0252883623519116,
+ "grad_norm": 0.6061127781867981,
+ "learning_rate": 0.0001476214238260781,
+ "loss": 1.4236,
+ "step": 1025
+ },
+ {
+ "epoch": 1.0262886436810352,
+ "grad_norm": 0.5413788557052612,
+ "learning_rate": 0.000147529222356109,
+ "loss": 1.1392,
+ "step": 1026
+ },
+ {
+ "epoch": 1.0272889250101591,
+ "grad_norm": 0.6879326105117798,
+ "learning_rate": 0.0001474369686601202,
+ "loss": 1.3966,
+ "step": 1027
+ },
+ {
+ "epoch": 1.028289206339283,
+ "grad_norm": 0.817315936088562,
+ "learning_rate": 0.0001473446628394818,
+ "loss": 1.6747,
+ "step": 1028
+ },
+ {
+ "epoch": 1.0292894876684067,
+ "grad_norm": 0.7139183282852173,
+ "learning_rate": 0.00014725230499562119,
+ "loss": 1.5432,
+ "step": 1029
+ },
+ {
+ "epoch": 1.0302897689975306,
+ "grad_norm": 0.7536730766296387,
+ "learning_rate": 0.00014715989523002296,
+ "loss": 1.5839,
+ "step": 1030
+ },
+ {
+ "epoch": 1.0312900503266544,
+ "grad_norm": 0.7000136375427246,
+ "learning_rate": 0.00014706743364422878,
+ "loss": 1.3519,
+ "step": 1031
+ },
+ {
+ "epoch": 1.032290331655778,
+ "grad_norm": 0.6579506993293762,
+ "learning_rate": 0.00014697492033983707,
+ "loss": 1.3622,
+ "step": 1032
+ },
+ {
+ "epoch": 1.033290612984902,
+ "grad_norm": 0.6257238984107971,
+ "learning_rate": 0.00014688235541850337,
+ "loss": 1.4393,
+ "step": 1033
+ },
+ {
+ "epoch": 1.0342908943140259,
+ "grad_norm": 0.749273955821991,
+ "learning_rate": 0.0001467897389819397,
+ "loss": 1.5201,
+ "step": 1034
+ },
+ {
+ "epoch": 1.0352911756431495,
+ "grad_norm": 0.7008610963821411,
+ "learning_rate": 0.00014669707113191483,
+ "loss": 1.3041,
+ "step": 1035
+ },
+ {
+ "epoch": 1.0362914569722734,
+ "grad_norm": 0.6838043332099915,
+ "learning_rate": 0.0001466043519702539,
+ "loss": 1.435,
+ "step": 1036
+ },
+ {
+ "epoch": 1.0372917383013973,
+ "grad_norm": 0.6197534799575806,
+ "learning_rate": 0.00014651158159883855,
+ "loss": 1.3806,
+ "step": 1037
+ },
+ {
+ "epoch": 1.038292019630521,
+ "grad_norm": 0.6906173825263977,
+ "learning_rate": 0.0001464187601196066,
+ "loss": 1.3898,
+ "step": 1038
+ },
+ {
+ "epoch": 1.0392923009596449,
+ "grad_norm": 0.5627701282501221,
+ "learning_rate": 0.00014632588763455212,
+ "loss": 1.3949,
+ "step": 1039
+ },
+ {
+ "epoch": 1.0402925822887688,
+ "grad_norm": 0.6588866710662842,
+ "learning_rate": 0.00014623296424572517,
+ "loss": 1.4041,
+ "step": 1040
+ },
+ {
+ "epoch": 1.0412928636178926,
+ "grad_norm": 0.7941678762435913,
+ "learning_rate": 0.00014613999005523174,
+ "loss": 1.429,
+ "step": 1041
+ },
+ {
+ "epoch": 1.0422931449470163,
+ "grad_norm": 0.5834561586380005,
+ "learning_rate": 0.00014604696516523361,
+ "loss": 1.4007,
+ "step": 1042
+ },
+ {
+ "epoch": 1.0432934262761402,
+ "grad_norm": 0.5992164015769958,
+ "learning_rate": 0.00014595388967794835,
+ "loss": 1.4029,
+ "step": 1043
+ },
+ {
+ "epoch": 1.044293707605264,
+ "grad_norm": 0.6714745759963989,
+ "learning_rate": 0.00014586076369564908,
+ "loss": 1.4421,
+ "step": 1044
+ },
+ {
+ "epoch": 1.0452939889343877,
+ "grad_norm": 0.6675744652748108,
+ "learning_rate": 0.00014576758732066442,
+ "loss": 1.4663,
+ "step": 1045
+ },
+ {
+ "epoch": 1.0462942702635116,
+ "grad_norm": 0.6605483293533325,
+ "learning_rate": 0.00014567436065537835,
+ "loss": 1.3919,
+ "step": 1046
+ },
+ {
+ "epoch": 1.0472945515926355,
+ "grad_norm": 0.6836503744125366,
+ "learning_rate": 0.00014558108380223012,
+ "loss": 1.3428,
+ "step": 1047
+ },
+ {
+ "epoch": 1.0482948329217592,
+ "grad_norm": 0.6451092958450317,
+ "learning_rate": 0.00014548775686371412,
+ "loss": 1.3717,
+ "step": 1048
+ },
+ {
+ "epoch": 1.049295114250883,
+ "grad_norm": 0.6579246520996094,
+ "learning_rate": 0.00014539437994237977,
+ "loss": 1.7364,
+ "step": 1049
+ },
+ {
+ "epoch": 1.050295395580007,
+ "grad_norm": 0.625912070274353,
+ "learning_rate": 0.00014530095314083143,
+ "loss": 1.5574,
+ "step": 1050
+ },
+ {
+ "epoch": 1.0512956769091306,
+ "grad_norm": 0.7133544087409973,
+ "learning_rate": 0.00014520747656172824,
+ "loss": 1.6031,
+ "step": 1051
+ },
+ {
+ "epoch": 1.0522959582382545,
+ "grad_norm": 0.6956666111946106,
+ "learning_rate": 0.00014511395030778406,
+ "loss": 1.6075,
+ "step": 1052
+ },
+ {
+ "epoch": 1.0532962395673784,
+ "grad_norm": 0.7082141041755676,
+ "learning_rate": 0.00014502037448176734,
+ "loss": 1.3839,
+ "step": 1053
+ },
+ {
+ "epoch": 1.054296520896502,
+ "grad_norm": 0.696561872959137,
+ "learning_rate": 0.000144926749186501,
+ "loss": 1.6738,
+ "step": 1054
+ },
+ {
+ "epoch": 1.055296802225626,
+ "grad_norm": 0.6995558142662048,
+ "learning_rate": 0.00014483307452486227,
+ "loss": 1.4732,
+ "step": 1055
+ },
+ {
+ "epoch": 1.0562970835547498,
+ "grad_norm": 0.7434210181236267,
+ "learning_rate": 0.0001447393505997827,
+ "loss": 1.4207,
+ "step": 1056
+ },
+ {
+ "epoch": 1.0572973648838735,
+ "grad_norm": 0.6679419279098511,
+ "learning_rate": 0.00014464557751424793,
+ "loss": 1.397,
+ "step": 1057
+ },
+ {
+ "epoch": 1.0582976462129974,
+ "grad_norm": 0.6747702360153198,
+ "learning_rate": 0.00014455175537129758,
+ "loss": 1.5247,
+ "step": 1058
+ },
+ {
+ "epoch": 1.0592979275421213,
+ "grad_norm": 0.6184663772583008,
+ "learning_rate": 0.00014445788427402528,
+ "loss": 1.2086,
+ "step": 1059
+ },
+ {
+ "epoch": 1.0602982088712452,
+ "grad_norm": 0.6546644568443298,
+ "learning_rate": 0.00014436396432557835,
+ "loss": 1.3795,
+ "step": 1060
+ },
+ {
+ "epoch": 1.0612984902003688,
+ "grad_norm": 0.6418478488922119,
+ "learning_rate": 0.00014426999562915782,
+ "loss": 1.3997,
+ "step": 1061
+ },
+ {
+ "epoch": 1.0622987715294927,
+ "grad_norm": 0.6456977725028992,
+ "learning_rate": 0.00014417597828801832,
+ "loss": 1.347,
+ "step": 1062
+ },
+ {
+ "epoch": 1.0632990528586166,
+ "grad_norm": 0.7379586696624756,
+ "learning_rate": 0.0001440819124054679,
+ "loss": 1.4168,
+ "step": 1063
+ },
+ {
+ "epoch": 1.0642993341877403,
+ "grad_norm": 0.583483099937439,
+ "learning_rate": 0.00014398779808486793,
+ "loss": 1.3724,
+ "step": 1064
+ },
+ {
+ "epoch": 1.0652996155168641,
+ "grad_norm": 0.8881146311759949,
+ "learning_rate": 0.00014389363542963306,
+ "loss": 1.2834,
+ "step": 1065
+ },
+ {
+ "epoch": 1.066299896845988,
+ "grad_norm": 0.6458824276924133,
+ "learning_rate": 0.000143799424543231,
+ "loss": 1.2557,
+ "step": 1066
+ },
+ {
+ "epoch": 1.0673001781751117,
+ "grad_norm": 0.8149404525756836,
+ "learning_rate": 0.0001437051655291825,
+ "loss": 1.5179,
+ "step": 1067
+ },
+ {
+ "epoch": 1.0683004595042356,
+ "grad_norm": 0.8752502202987671,
+ "learning_rate": 0.0001436108584910611,
+ "loss": 1.3922,
+ "step": 1068
+ },
+ {
+ "epoch": 1.0693007408333595,
+ "grad_norm": 0.6741296648979187,
+ "learning_rate": 0.0001435165035324933,
+ "loss": 1.464,
+ "step": 1069
+ },
+ {
+ "epoch": 1.0703010221624831,
+ "grad_norm": 0.6555476784706116,
+ "learning_rate": 0.000143422100757158,
+ "loss": 1.3172,
+ "step": 1070
+ },
+ {
+ "epoch": 1.071301303491607,
+ "grad_norm": 0.660168468952179,
+ "learning_rate": 0.00014332765026878687,
+ "loss": 1.3089,
+ "step": 1071
+ },
+ {
+ "epoch": 1.072301584820731,
+ "grad_norm": 0.8213777542114258,
+ "learning_rate": 0.0001432331521711639,
+ "loss": 1.4487,
+ "step": 1072
+ },
+ {
+ "epoch": 1.0733018661498546,
+ "grad_norm": 0.6642137765884399,
+ "learning_rate": 0.00014313860656812536,
+ "loss": 1.1624,
+ "step": 1073
+ },
+ {
+ "epoch": 1.0743021474789785,
+ "grad_norm": 0.6304247975349426,
+ "learning_rate": 0.00014304401356355983,
+ "loss": 1.2725,
+ "step": 1074
+ },
+ {
+ "epoch": 1.0753024288081023,
+ "grad_norm": 0.6976219415664673,
+ "learning_rate": 0.00014294937326140788,
+ "loss": 1.6664,
+ "step": 1075
+ },
+ {
+ "epoch": 1.076302710137226,
+ "grad_norm": 0.6528605818748474,
+ "learning_rate": 0.00014285468576566207,
+ "loss": 1.3489,
+ "step": 1076
+ },
+ {
+ "epoch": 1.07730299146635,
+ "grad_norm": 0.7203120589256287,
+ "learning_rate": 0.00014275995118036693,
+ "loss": 1.4319,
+ "step": 1077
+ },
+ {
+ "epoch": 1.0783032727954738,
+ "grad_norm": 0.8259358406066895,
+ "learning_rate": 0.00014266516960961852,
+ "loss": 1.3707,
+ "step": 1078
+ },
+ {
+ "epoch": 1.0793035541245974,
+ "grad_norm": 0.9485010504722595,
+ "learning_rate": 0.00014257034115756472,
+ "loss": 1.6787,
+ "step": 1079
+ },
+ {
+ "epoch": 1.0803038354537213,
+ "grad_norm": 0.6732786893844604,
+ "learning_rate": 0.0001424754659284048,
+ "loss": 1.4184,
+ "step": 1080
+ },
+ {
+ "epoch": 1.0813041167828452,
+ "grad_norm": 0.6673377752304077,
+ "learning_rate": 0.0001423805440263895,
+ "loss": 1.5084,
+ "step": 1081
+ },
+ {
+ "epoch": 1.0823043981119689,
+ "grad_norm": 0.6682411432266235,
+ "learning_rate": 0.0001422855755558208,
+ "loss": 1.4034,
+ "step": 1082
+ },
+ {
+ "epoch": 1.0833046794410928,
+ "grad_norm": 0.6940018534660339,
+ "learning_rate": 0.00014219056062105193,
+ "loss": 1.6816,
+ "step": 1083
+ },
+ {
+ "epoch": 1.0843049607702167,
+ "grad_norm": 0.7052391767501831,
+ "learning_rate": 0.0001420954993264871,
+ "loss": 1.5849,
+ "step": 1084
+ },
+ {
+ "epoch": 1.0853052420993405,
+ "grad_norm": 0.7090102434158325,
+ "learning_rate": 0.00014200039177658145,
+ "loss": 1.2906,
+ "step": 1085
+ },
+ {
+ "epoch": 1.0863055234284642,
+ "grad_norm": 0.7664905190467834,
+ "learning_rate": 0.000141905238075841,
+ "loss": 1.5504,
+ "step": 1086
+ },
+ {
+ "epoch": 1.087305804757588,
+ "grad_norm": 0.6557911038398743,
+ "learning_rate": 0.00014181003832882248,
+ "loss": 1.5846,
+ "step": 1087
+ },
+ {
+ "epoch": 1.088306086086712,
+ "grad_norm": 0.6342834234237671,
+ "learning_rate": 0.00014171479264013311,
+ "loss": 1.2405,
+ "step": 1088
+ },
+ {
+ "epoch": 1.0893063674158356,
+ "grad_norm": 0.7152488827705383,
+ "learning_rate": 0.00014161950111443077,
+ "loss": 1.5047,
+ "step": 1089
+ },
+ {
+ "epoch": 1.0903066487449595,
+ "grad_norm": 0.6031161546707153,
+ "learning_rate": 0.00014152416385642357,
+ "loss": 1.3203,
+ "step": 1090
+ },
+ {
+ "epoch": 1.0913069300740834,
+ "grad_norm": 0.6475042700767517,
+ "learning_rate": 0.00014142878097086995,
+ "loss": 1.191,
+ "step": 1091
+ },
+ {
+ "epoch": 1.092307211403207,
+ "grad_norm": 0.7956790924072266,
+ "learning_rate": 0.0001413333525625784,
+ "loss": 1.3601,
+ "step": 1092
+ },
+ {
+ "epoch": 1.093307492732331,
+ "grad_norm": 0.6703265309333801,
+ "learning_rate": 0.00014123787873640754,
+ "loss": 1.374,
+ "step": 1093
+ },
+ {
+ "epoch": 1.0943077740614549,
+ "grad_norm": 0.7583750486373901,
+ "learning_rate": 0.00014114235959726575,
+ "loss": 1.3064,
+ "step": 1094
+ },
+ {
+ "epoch": 1.0953080553905785,
+ "grad_norm": 0.6749271154403687,
+ "learning_rate": 0.0001410467952501114,
+ "loss": 1.4501,
+ "step": 1095
+ },
+ {
+ "epoch": 1.0963083367197024,
+ "grad_norm": 0.6708521842956543,
+ "learning_rate": 0.00014095118579995235,
+ "loss": 1.5046,
+ "step": 1096
+ },
+ {
+ "epoch": 1.0973086180488263,
+ "grad_norm": 0.5871726870536804,
+ "learning_rate": 0.0001408555313518461,
+ "loss": 1.3549,
+ "step": 1097
+ },
+ {
+ "epoch": 1.09830889937795,
+ "grad_norm": 0.6886669397354126,
+ "learning_rate": 0.00014075983201089964,
+ "loss": 1.3131,
+ "step": 1098
+ },
+ {
+ "epoch": 1.0993091807070738,
+ "grad_norm": 0.6243886351585388,
+ "learning_rate": 0.0001406640878822692,
+ "loss": 1.2278,
+ "step": 1099
+ },
+ {
+ "epoch": 1.1003094620361977,
+ "grad_norm": 0.7198624610900879,
+ "learning_rate": 0.00014056829907116024,
+ "loss": 1.4459,
+ "step": 1100
+ },
+ {
+ "epoch": 1.1013097433653214,
+ "grad_norm": 0.8059262037277222,
+ "learning_rate": 0.00014047246568282736,
+ "loss": 1.473,
+ "step": 1101
+ },
+ {
+ "epoch": 1.1023100246944453,
+ "grad_norm": 0.8409417271614075,
+ "learning_rate": 0.00014037658782257414,
+ "loss": 1.6688,
+ "step": 1102
+ },
+ {
+ "epoch": 1.1033103060235692,
+ "grad_norm": 0.739276111125946,
+ "learning_rate": 0.00014028066559575302,
+ "loss": 1.6182,
+ "step": 1103
+ },
+ {
+ "epoch": 1.104310587352693,
+ "grad_norm": 0.6282714009284973,
+ "learning_rate": 0.00014018469910776513,
+ "loss": 1.2835,
+ "step": 1104
+ },
+ {
+ "epoch": 1.1053108686818167,
+ "grad_norm": 0.7133497595787048,
+ "learning_rate": 0.0001400886884640603,
+ "loss": 1.5798,
+ "step": 1105
+ },
+ {
+ "epoch": 1.1063111500109406,
+ "grad_norm": 0.6376346945762634,
+ "learning_rate": 0.00013999263377013693,
+ "loss": 1.1436,
+ "step": 1106
+ },
+ {
+ "epoch": 1.1073114313400645,
+ "grad_norm": 0.5934734344482422,
+ "learning_rate": 0.00013989653513154165,
+ "loss": 1.3204,
+ "step": 1107
+ },
+ {
+ "epoch": 1.1083117126691882,
+ "grad_norm": 0.6655352115631104,
+ "learning_rate": 0.00013980039265386955,
+ "loss": 1.4602,
+ "step": 1108
+ },
+ {
+ "epoch": 1.109311993998312,
+ "grad_norm": 0.7147901058197021,
+ "learning_rate": 0.00013970420644276383,
+ "loss": 1.4124,
+ "step": 1109
+ },
+ {
+ "epoch": 1.110312275327436,
+ "grad_norm": 0.7845139503479004,
+ "learning_rate": 0.0001396079766039157,
+ "loss": 1.5831,
+ "step": 1110
+ },
+ {
+ "epoch": 1.1113125566565596,
+ "grad_norm": 0.8100587129592896,
+ "learning_rate": 0.00013951170324306435,
+ "loss": 1.6218,
+ "step": 1111
+ },
+ {
+ "epoch": 1.1123128379856835,
+ "grad_norm": 0.689988374710083,
+ "learning_rate": 0.00013941538646599687,
+ "loss": 1.2396,
+ "step": 1112
+ },
+ {
+ "epoch": 1.1133131193148074,
+ "grad_norm": 0.6771540641784668,
+ "learning_rate": 0.0001393190263785479,
+ "loss": 1.3739,
+ "step": 1113
+ },
+ {
+ "epoch": 1.114313400643931,
+ "grad_norm": 0.6424306631088257,
+ "learning_rate": 0.0001392226230865998,
+ "loss": 1.1653,
+ "step": 1114
+ },
+ {
+ "epoch": 1.115313681973055,
+ "grad_norm": 0.6135202646255493,
+ "learning_rate": 0.0001391261766960823,
+ "loss": 1.1924,
+ "step": 1115
+ },
+ {
+ "epoch": 1.1163139633021788,
+ "grad_norm": 0.6751917004585266,
+ "learning_rate": 0.00013902968731297255,
+ "loss": 1.4491,
+ "step": 1116
+ },
+ {
+ "epoch": 1.1173142446313025,
+ "grad_norm": 0.875303328037262,
+ "learning_rate": 0.00013893315504329498,
+ "loss": 1.3918,
+ "step": 1117
+ },
+ {
+ "epoch": 1.1183145259604264,
+ "grad_norm": 0.7102020978927612,
+ "learning_rate": 0.00013883657999312109,
+ "loss": 1.463,
+ "step": 1118
+ },
+ {
+ "epoch": 1.1193148072895502,
+ "grad_norm": 0.6863378882408142,
+ "learning_rate": 0.00013873996226856933,
+ "loss": 1.3958,
+ "step": 1119
+ },
+ {
+ "epoch": 1.120315088618674,
+ "grad_norm": 0.6769587397575378,
+ "learning_rate": 0.00013864330197580513,
+ "loss": 1.3044,
+ "step": 1120
+ },
+ {
+ "epoch": 1.1213153699477978,
+ "grad_norm": 0.7217769026756287,
+ "learning_rate": 0.0001385465992210407,
+ "loss": 1.6125,
+ "step": 1121
+ },
+ {
+ "epoch": 1.1223156512769217,
+ "grad_norm": 0.6756213903427124,
+ "learning_rate": 0.00013844985411053492,
+ "loss": 1.3658,
+ "step": 1122
+ },
+ {
+ "epoch": 1.1233159326060456,
+ "grad_norm": 0.7109145522117615,
+ "learning_rate": 0.00013835306675059308,
+ "loss": 1.5698,
+ "step": 1123
+ },
+ {
+ "epoch": 1.1243162139351692,
+ "grad_norm": 0.5903546810150146,
+ "learning_rate": 0.00013825623724756704,
+ "loss": 1.4429,
+ "step": 1124
+ },
+ {
+ "epoch": 1.1253164952642931,
+ "grad_norm": 0.7500163912773132,
+ "learning_rate": 0.00013815936570785487,
+ "loss": 1.2482,
+ "step": 1125
+ },
+ {
+ "epoch": 1.1263167765934168,
+ "grad_norm": 0.6458998918533325,
+ "learning_rate": 0.00013806245223790088,
+ "loss": 1.3496,
+ "step": 1126
+ },
+ {
+ "epoch": 1.1273170579225407,
+ "grad_norm": 0.627657413482666,
+ "learning_rate": 0.0001379654969441955,
+ "loss": 1.4847,
+ "step": 1127
+ },
+ {
+ "epoch": 1.1283173392516646,
+ "grad_norm": 0.7440046072006226,
+ "learning_rate": 0.000137868499933275,
+ "loss": 1.782,
+ "step": 1128
+ },
+ {
+ "epoch": 1.1293176205807884,
+ "grad_norm": 0.6717308163642883,
+ "learning_rate": 0.00013777146131172162,
+ "loss": 1.6345,
+ "step": 1129
+ },
+ {
+ "epoch": 1.130317901909912,
+ "grad_norm": 0.6480956673622131,
+ "learning_rate": 0.00013767438118616318,
+ "loss": 1.2862,
+ "step": 1130
+ },
+ {
+ "epoch": 1.131318183239036,
+ "grad_norm": 0.6778338551521301,
+ "learning_rate": 0.00013757725966327322,
+ "loss": 1.4821,
+ "step": 1131
+ },
+ {
+ "epoch": 1.1323184645681599,
+ "grad_norm": 0.6759636402130127,
+ "learning_rate": 0.00013748009684977073,
+ "loss": 1.5988,
+ "step": 1132
+ },
+ {
+ "epoch": 1.1333187458972835,
+ "grad_norm": 0.674404501914978,
+ "learning_rate": 0.0001373828928524201,
+ "loss": 1.4744,
+ "step": 1133
+ },
+ {
+ "epoch": 1.1343190272264074,
+ "grad_norm": 0.6017488241195679,
+ "learning_rate": 0.00013728564777803088,
+ "loss": 1.6296,
+ "step": 1134
+ },
+ {
+ "epoch": 1.1353193085555313,
+ "grad_norm": 0.6459933519363403,
+ "learning_rate": 0.00013718836173345783,
+ "loss": 1.5347,
+ "step": 1135
+ },
+ {
+ "epoch": 1.136319589884655,
+ "grad_norm": 0.6578681468963623,
+ "learning_rate": 0.00013709103482560078,
+ "loss": 1.4101,
+ "step": 1136
+ },
+ {
+ "epoch": 1.1373198712137789,
+ "grad_norm": 0.5906695127487183,
+ "learning_rate": 0.00013699366716140435,
+ "loss": 1.422,
+ "step": 1137
+ },
+ {
+ "epoch": 1.1383201525429028,
+ "grad_norm": 0.5622004866600037,
+ "learning_rate": 0.00013689625884785798,
+ "loss": 1.2805,
+ "step": 1138
+ },
+ {
+ "epoch": 1.1393204338720264,
+ "grad_norm": 0.7057269811630249,
+ "learning_rate": 0.00013679880999199583,
+ "loss": 1.5009,
+ "step": 1139
+ },
+ {
+ "epoch": 1.1403207152011503,
+ "grad_norm": 0.6655155420303345,
+ "learning_rate": 0.00013670132070089653,
+ "loss": 1.3504,
+ "step": 1140
+ },
+ {
+ "epoch": 1.1413209965302742,
+ "grad_norm": 0.6448667645454407,
+ "learning_rate": 0.00013660379108168324,
+ "loss": 1.4345,
+ "step": 1141
+ },
+ {
+ "epoch": 1.142321277859398,
+ "grad_norm": 0.6696295142173767,
+ "learning_rate": 0.00013650622124152334,
+ "loss": 1.3248,
+ "step": 1142
+ },
+ {
+ "epoch": 1.1433215591885217,
+ "grad_norm": 0.8913035988807678,
+ "learning_rate": 0.0001364086112876284,
+ "loss": 1.3148,
+ "step": 1143
+ },
+ {
+ "epoch": 1.1443218405176456,
+ "grad_norm": 0.6853944659233093,
+ "learning_rate": 0.00013631096132725413,
+ "loss": 1.4361,
+ "step": 1144
+ },
+ {
+ "epoch": 1.1453221218467693,
+ "grad_norm": 0.6286287307739258,
+ "learning_rate": 0.00013621327146770025,
+ "loss": 1.4485,
+ "step": 1145
+ },
+ {
+ "epoch": 1.1463224031758932,
+ "grad_norm": 0.6847277283668518,
+ "learning_rate": 0.00013611554181631013,
+ "loss": 1.4095,
+ "step": 1146
+ },
+ {
+ "epoch": 1.147322684505017,
+ "grad_norm": 0.6514857411384583,
+ "learning_rate": 0.00013601777248047105,
+ "loss": 1.4106,
+ "step": 1147
+ },
+ {
+ "epoch": 1.148322965834141,
+ "grad_norm": 0.6113057732582092,
+ "learning_rate": 0.0001359199635676138,
+ "loss": 1.3483,
+ "step": 1148
+ },
+ {
+ "epoch": 1.1493232471632646,
+ "grad_norm": 0.6366062164306641,
+ "learning_rate": 0.00013582211518521273,
+ "loss": 1.4164,
+ "step": 1149
+ },
+ {
+ "epoch": 1.1503235284923885,
+ "grad_norm": 0.6680336594581604,
+ "learning_rate": 0.00013572422744078551,
+ "loss": 1.5326,
+ "step": 1150
+ },
+ {
+ "epoch": 1.1513238098215124,
+ "grad_norm": 0.6046566367149353,
+ "learning_rate": 0.00013562630044189304,
+ "loss": 1.4139,
+ "step": 1151
+ },
+ {
+ "epoch": 1.152324091150636,
+ "grad_norm": 0.5869380235671997,
+ "learning_rate": 0.00013552833429613938,
+ "loss": 1.2859,
+ "step": 1152
+ },
+ {
+ "epoch": 1.15332437247976,
+ "grad_norm": 0.6923080682754517,
+ "learning_rate": 0.0001354303291111716,
+ "loss": 1.3419,
+ "step": 1153
+ },
+ {
+ "epoch": 1.1543246538088838,
+ "grad_norm": 0.698279082775116,
+ "learning_rate": 0.0001353322849946797,
+ "loss": 1.4796,
+ "step": 1154
+ },
+ {
+ "epoch": 1.1553249351380075,
+ "grad_norm": 0.6980450749397278,
+ "learning_rate": 0.00013523420205439646,
+ "loss": 1.5941,
+ "step": 1155
+ },
+ {
+ "epoch": 1.1563252164671314,
+ "grad_norm": 0.7222338914871216,
+ "learning_rate": 0.0001351360803980972,
+ "loss": 1.5019,
+ "step": 1156
+ },
+ {
+ "epoch": 1.1573254977962553,
+ "grad_norm": 0.6446929574012756,
+ "learning_rate": 0.00013503792013359997,
+ "loss": 1.2706,
+ "step": 1157
+ },
+ {
+ "epoch": 1.158325779125379,
+ "grad_norm": 0.699488639831543,
+ "learning_rate": 0.00013493972136876509,
+ "loss": 1.5775,
+ "step": 1158
+ },
+ {
+ "epoch": 1.1593260604545028,
+ "grad_norm": 0.6865110397338867,
+ "learning_rate": 0.00013484148421149527,
+ "loss": 1.5015,
+ "step": 1159
+ },
+ {
+ "epoch": 1.1603263417836267,
+ "grad_norm": 0.800570547580719,
+ "learning_rate": 0.0001347432087697354,
+ "loss": 1.401,
+ "step": 1160
+ },
+ {
+ "epoch": 1.1613266231127504,
+ "grad_norm": 0.706388533115387,
+ "learning_rate": 0.00013464489515147238,
+ "loss": 1.2895,
+ "step": 1161
+ },
+ {
+ "epoch": 1.1623269044418743,
+ "grad_norm": 0.7967466711997986,
+ "learning_rate": 0.0001345465434647351,
+ "loss": 1.848,
+ "step": 1162
+ },
+ {
+ "epoch": 1.1633271857709981,
+ "grad_norm": 0.7130827903747559,
+ "learning_rate": 0.00013444815381759425,
+ "loss": 1.2899,
+ "step": 1163
+ },
+ {
+ "epoch": 1.1643274671001218,
+ "grad_norm": 0.6437693238258362,
+ "learning_rate": 0.00013434972631816235,
+ "loss": 1.4456,
+ "step": 1164
+ },
+ {
+ "epoch": 1.1653277484292457,
+ "grad_norm": 0.6305271983146667,
+ "learning_rate": 0.0001342512610745933,
+ "loss": 1.3375,
+ "step": 1165
+ },
+ {
+ "epoch": 1.1663280297583696,
+ "grad_norm": 0.6622384786605835,
+ "learning_rate": 0.0001341527581950827,
+ "loss": 1.6379,
+ "step": 1166
+ },
+ {
+ "epoch": 1.1673283110874935,
+ "grad_norm": 0.64511638879776,
+ "learning_rate": 0.00013405421778786737,
+ "loss": 1.2818,
+ "step": 1167
+ },
+ {
+ "epoch": 1.1683285924166171,
+ "grad_norm": 0.6575477719306946,
+ "learning_rate": 0.00013395563996122537,
+ "loss": 1.2264,
+ "step": 1168
+ },
+ {
+ "epoch": 1.169328873745741,
+ "grad_norm": 0.787896990776062,
+ "learning_rate": 0.00013385702482347593,
+ "loss": 1.6519,
+ "step": 1169
+ },
+ {
+ "epoch": 1.170329155074865,
+ "grad_norm": 0.7512592077255249,
+ "learning_rate": 0.00013375837248297926,
+ "loss": 1.4776,
+ "step": 1170
+ },
+ {
+ "epoch": 1.1713294364039886,
+ "grad_norm": 0.9541054964065552,
+ "learning_rate": 0.0001336596830481364,
+ "loss": 1.5383,
+ "step": 1171
+ },
+ {
+ "epoch": 1.1723297177331125,
+ "grad_norm": 0.7181218266487122,
+ "learning_rate": 0.0001335609566273892,
+ "loss": 1.4088,
+ "step": 1172
+ },
+ {
+ "epoch": 1.1733299990622363,
+ "grad_norm": 0.7449761629104614,
+ "learning_rate": 0.00013346219332922016,
+ "loss": 1.2313,
+ "step": 1173
+ },
+ {
+ "epoch": 1.17433028039136,
+ "grad_norm": 0.7783718705177307,
+ "learning_rate": 0.00013336339326215228,
+ "loss": 1.4114,
+ "step": 1174
+ },
+ {
+ "epoch": 1.175330561720484,
+ "grad_norm": 0.8479213714599609,
+ "learning_rate": 0.00013326455653474897,
+ "loss": 1.6336,
+ "step": 1175
+ },
+ {
+ "epoch": 1.1763308430496078,
+ "grad_norm": 0.7903116941452026,
+ "learning_rate": 0.00013316568325561393,
+ "loss": 1.3251,
+ "step": 1176
+ },
+ {
+ "epoch": 1.1773311243787314,
+ "grad_norm": 0.7016749978065491,
+ "learning_rate": 0.00013306677353339098,
+ "loss": 1.5013,
+ "step": 1177
+ },
+ {
+ "epoch": 1.1783314057078553,
+ "grad_norm": 0.6183115839958191,
+ "learning_rate": 0.000132967827476764,
+ "loss": 1.3984,
+ "step": 1178
+ },
+ {
+ "epoch": 1.1793316870369792,
+ "grad_norm": 0.6227801442146301,
+ "learning_rate": 0.0001328688451944569,
+ "loss": 1.2306,
+ "step": 1179
+ },
+ {
+ "epoch": 1.1803319683661029,
+ "grad_norm": 0.7611119151115417,
+ "learning_rate": 0.00013276982679523322,
+ "loss": 1.3648,
+ "step": 1180
+ },
+ {
+ "epoch": 1.1813322496952268,
+ "grad_norm": 0.6929368376731873,
+ "learning_rate": 0.00013267077238789633,
+ "loss": 1.5107,
+ "step": 1181
+ },
+ {
+ "epoch": 1.1823325310243507,
+ "grad_norm": 0.6637099385261536,
+ "learning_rate": 0.00013257168208128908,
+ "loss": 1.5103,
+ "step": 1182
+ },
+ {
+ "epoch": 1.1833328123534743,
+ "grad_norm": 0.6320933103561401,
+ "learning_rate": 0.00013247255598429378,
+ "loss": 1.4024,
+ "step": 1183
+ },
+ {
+ "epoch": 1.1843330936825982,
+ "grad_norm": 0.8260888457298279,
+ "learning_rate": 0.00013237339420583212,
+ "loss": 1.4385,
+ "step": 1184
+ },
+ {
+ "epoch": 1.185333375011722,
+ "grad_norm": 0.7872930765151978,
+ "learning_rate": 0.00013227419685486492,
+ "loss": 1.2566,
+ "step": 1185
+ },
+ {
+ "epoch": 1.186333656340846,
+ "grad_norm": 0.6857215762138367,
+ "learning_rate": 0.00013217496404039218,
+ "loss": 1.619,
+ "step": 1186
+ },
+ {
+ "epoch": 1.1873339376699696,
+ "grad_norm": 0.708858072757721,
+ "learning_rate": 0.0001320756958714528,
+ "loss": 1.2228,
+ "step": 1187
+ },
+ {
+ "epoch": 1.1883342189990935,
+ "grad_norm": 0.6442694067955017,
+ "learning_rate": 0.00013197639245712454,
+ "loss": 1.5542,
+ "step": 1188
+ },
+ {
+ "epoch": 1.1893345003282172,
+ "grad_norm": 0.5615749359130859,
+ "learning_rate": 0.00013187705390652388,
+ "loss": 1.5289,
+ "step": 1189
+ },
+ {
+ "epoch": 1.190334781657341,
+ "grad_norm": 0.6250069737434387,
+ "learning_rate": 0.00013177768032880593,
+ "loss": 1.3849,
+ "step": 1190
+ },
+ {
+ "epoch": 1.191335062986465,
+ "grad_norm": 0.6521658301353455,
+ "learning_rate": 0.0001316782718331643,
+ "loss": 1.4118,
+ "step": 1191
+ },
+ {
+ "epoch": 1.1923353443155889,
+ "grad_norm": 0.7188206315040588,
+ "learning_rate": 0.0001315788285288309,
+ "loss": 1.4436,
+ "step": 1192
+ },
+ {
+ "epoch": 1.1933356256447125,
+ "grad_norm": 0.7098423838615417,
+ "learning_rate": 0.00013147935052507597,
+ "loss": 1.3339,
+ "step": 1193
+ },
+ {
+ "epoch": 1.1943359069738364,
+ "grad_norm": 0.655750036239624,
+ "learning_rate": 0.00013137983793120786,
+ "loss": 1.4208,
+ "step": 1194
+ },
+ {
+ "epoch": 1.1953361883029603,
+ "grad_norm": 0.640650749206543,
+ "learning_rate": 0.0001312802908565729,
+ "loss": 1.7209,
+ "step": 1195
+ },
+ {
+ "epoch": 1.196336469632084,
+ "grad_norm": 0.601091206073761,
+ "learning_rate": 0.0001311807094105553,
+ "loss": 1.4339,
+ "step": 1196
+ },
+ {
+ "epoch": 1.1973367509612078,
+ "grad_norm": 0.5894292593002319,
+ "learning_rate": 0.00013108109370257712,
+ "loss": 1.4687,
+ "step": 1197
+ },
+ {
+ "epoch": 1.1983370322903317,
+ "grad_norm": 0.7012053728103638,
+ "learning_rate": 0.00013098144384209796,
+ "loss": 1.5834,
+ "step": 1198
+ },
+ {
+ "epoch": 1.1993373136194554,
+ "grad_norm": 0.636356770992279,
+ "learning_rate": 0.000130881759938615,
+ "loss": 1.1723,
+ "step": 1199
+ },
+ {
+ "epoch": 1.2003375949485793,
+ "grad_norm": 0.6913763284683228,
+ "learning_rate": 0.00013078204210166278,
+ "loss": 1.3327,
+ "step": 1200
+ },
+ {
+ "epoch": 1.2013378762777032,
+ "grad_norm": 0.7067025303840637,
+ "learning_rate": 0.00013068229044081324,
+ "loss": 1.4671,
+ "step": 1201
+ },
+ {
+ "epoch": 1.2023381576068268,
+ "grad_norm": 0.698302149772644,
+ "learning_rate": 0.0001305825050656754,
+ "loss": 1.601,
+ "step": 1202
+ },
+ {
+ "epoch": 1.2033384389359507,
+ "grad_norm": 0.6283687949180603,
+ "learning_rate": 0.00013048268608589533,
+ "loss": 1.4164,
+ "step": 1203
+ },
+ {
+ "epoch": 1.2043387202650746,
+ "grad_norm": 0.6440062522888184,
+ "learning_rate": 0.00013038283361115603,
+ "loss": 1.3725,
+ "step": 1204
+ },
+ {
+ "epoch": 1.2053390015941985,
+ "grad_norm": 0.726294994354248,
+ "learning_rate": 0.0001302829477511773,
+ "loss": 1.4557,
+ "step": 1205
+ },
+ {
+ "epoch": 1.2063392829233222,
+ "grad_norm": 0.5984981060028076,
+ "learning_rate": 0.0001301830286157157,
+ "loss": 1.3455,
+ "step": 1206
+ },
+ {
+ "epoch": 1.207339564252446,
+ "grad_norm": 0.6096123456954956,
+ "learning_rate": 0.0001300830763145642,
+ "loss": 1.3562,
+ "step": 1207
+ },
+ {
+ "epoch": 1.2083398455815697,
+ "grad_norm": 0.815022349357605,
+ "learning_rate": 0.00012998309095755235,
+ "loss": 1.5287,
+ "step": 1208
+ },
+ {
+ "epoch": 1.2093401269106936,
+ "grad_norm": 0.990715742111206,
+ "learning_rate": 0.00012988307265454597,
+ "loss": 1.4186,
+ "step": 1209
+ },
+ {
+ "epoch": 1.2103404082398175,
+ "grad_norm": 0.8175992369651794,
+ "learning_rate": 0.0001297830215154471,
+ "loss": 1.3145,
+ "step": 1210
+ },
+ {
+ "epoch": 1.2113406895689414,
+ "grad_norm": 0.734703779220581,
+ "learning_rate": 0.00012968293765019384,
+ "loss": 1.359,
+ "step": 1211
+ },
+ {
+ "epoch": 1.212340970898065,
+ "grad_norm": 0.7687499523162842,
+ "learning_rate": 0.00012958282116876026,
+ "loss": 1.517,
+ "step": 1212
+ },
+ {
+ "epoch": 1.213341252227189,
+ "grad_norm": 0.5819374918937683,
+ "learning_rate": 0.00012948267218115624,
+ "loss": 1.353,
+ "step": 1213
+ },
+ {
+ "epoch": 1.2143415335563128,
+ "grad_norm": 0.6273573040962219,
+ "learning_rate": 0.00012938249079742743,
+ "loss": 1.3114,
+ "step": 1214
+ },
+ {
+ "epoch": 1.2153418148854365,
+ "grad_norm": 0.8144434094429016,
+ "learning_rate": 0.00012928227712765504,
+ "loss": 1.4763,
+ "step": 1215
+ },
+ {
+ "epoch": 1.2163420962145604,
+ "grad_norm": 0.6941531300544739,
+ "learning_rate": 0.0001291820312819558,
+ "loss": 1.3386,
+ "step": 1216
+ },
+ {
+ "epoch": 1.2173423775436842,
+ "grad_norm": 0.6548559069633484,
+ "learning_rate": 0.00012908175337048174,
+ "loss": 1.4763,
+ "step": 1217
+ },
+ {
+ "epoch": 1.218342658872808,
+ "grad_norm": 0.6587492227554321,
+ "learning_rate": 0.00012898144350342015,
+ "loss": 1.5342,
+ "step": 1218
+ },
+ {
+ "epoch": 1.2193429402019318,
+ "grad_norm": 0.6187465190887451,
+ "learning_rate": 0.0001288811017909934,
+ "loss": 1.5109,
+ "step": 1219
+ },
+ {
+ "epoch": 1.2203432215310557,
+ "grad_norm": 0.6152673959732056,
+ "learning_rate": 0.00012878072834345895,
+ "loss": 1.2812,
+ "step": 1220
+ },
+ {
+ "epoch": 1.2213435028601793,
+ "grad_norm": 0.6489769816398621,
+ "learning_rate": 0.00012868032327110904,
+ "loss": 1.4179,
+ "step": 1221
+ },
+ {
+ "epoch": 1.2223437841893032,
+ "grad_norm": 0.6900584697723389,
+ "learning_rate": 0.00012857988668427066,
+ "loss": 1.5862,
+ "step": 1222
+ },
+ {
+ "epoch": 1.2233440655184271,
+ "grad_norm": 0.7858991026878357,
+ "learning_rate": 0.0001284794186933055,
+ "loss": 1.5595,
+ "step": 1223
+ },
+ {
+ "epoch": 1.2243443468475508,
+ "grad_norm": 0.6736376285552979,
+ "learning_rate": 0.00012837891940860972,
+ "loss": 1.6053,
+ "step": 1224
+ },
+ {
+ "epoch": 1.2253446281766747,
+ "grad_norm": 0.6328126192092896,
+ "learning_rate": 0.00012827838894061377,
+ "loss": 1.5157,
+ "step": 1225
+ },
+ {
+ "epoch": 1.2263449095057986,
+ "grad_norm": 0.6681983470916748,
+ "learning_rate": 0.00012817782739978255,
+ "loss": 1.3832,
+ "step": 1226
+ },
+ {
+ "epoch": 1.2273451908349222,
+ "grad_norm": 0.6474846601486206,
+ "learning_rate": 0.00012807723489661495,
+ "loss": 1.4772,
+ "step": 1227
+ },
+ {
+ "epoch": 1.228345472164046,
+ "grad_norm": 0.7181409597396851,
+ "learning_rate": 0.00012797661154164395,
+ "loss": 1.4942,
+ "step": 1228
+ },
+ {
+ "epoch": 1.22934575349317,
+ "grad_norm": 0.8670255541801453,
+ "learning_rate": 0.00012787595744543647,
+ "loss": 1.3818,
+ "step": 1229
+ },
+ {
+ "epoch": 1.2303460348222939,
+ "grad_norm": 0.7007678747177124,
+ "learning_rate": 0.00012777527271859307,
+ "loss": 1.4428,
+ "step": 1230
+ },
+ {
+ "epoch": 1.2313463161514175,
+ "grad_norm": 0.5932227373123169,
+ "learning_rate": 0.0001276745574717481,
+ "loss": 1.1946,
+ "step": 1231
+ },
+ {
+ "epoch": 1.2323465974805414,
+ "grad_norm": 0.6067792177200317,
+ "learning_rate": 0.00012757381181556943,
+ "loss": 1.1963,
+ "step": 1232
+ },
+ {
+ "epoch": 1.2333468788096653,
+ "grad_norm": 0.6702238917350769,
+ "learning_rate": 0.0001274730358607583,
+ "loss": 1.4489,
+ "step": 1233
+ },
+ {
+ "epoch": 1.234347160138789,
+ "grad_norm": 0.6660708785057068,
+ "learning_rate": 0.00012737222971804924,
+ "loss": 1.4307,
+ "step": 1234
+ },
+ {
+ "epoch": 1.2353474414679129,
+ "grad_norm": 0.6711790561676025,
+ "learning_rate": 0.00012727139349821,
+ "loss": 1.4167,
+ "step": 1235
+ },
+ {
+ "epoch": 1.2363477227970368,
+ "grad_norm": 0.7145324349403381,
+ "learning_rate": 0.0001271705273120413,
+ "loss": 1.4125,
+ "step": 1236
+ },
+ {
+ "epoch": 1.2373480041261604,
+ "grad_norm": 0.6572199463844299,
+ "learning_rate": 0.00012706963127037685,
+ "loss": 1.2403,
+ "step": 1237
+ },
+ {
+ "epoch": 1.2383482854552843,
+ "grad_norm": 0.634173572063446,
+ "learning_rate": 0.00012696870548408316,
+ "loss": 1.2861,
+ "step": 1238
+ },
+ {
+ "epoch": 1.2393485667844082,
+ "grad_norm": 0.6740716695785522,
+ "learning_rate": 0.00012686775006405946,
+ "loss": 1.6377,
+ "step": 1239
+ },
+ {
+ "epoch": 1.2403488481135319,
+ "grad_norm": 0.7334823608398438,
+ "learning_rate": 0.00012676676512123747,
+ "loss": 1.4186,
+ "step": 1240
+ },
+ {
+ "epoch": 1.2413491294426557,
+ "grad_norm": 0.9025078415870667,
+ "learning_rate": 0.00012666575076658134,
+ "loss": 1.4633,
+ "step": 1241
+ },
+ {
+ "epoch": 1.2423494107717796,
+ "grad_norm": 0.7449138760566711,
+ "learning_rate": 0.00012656470711108764,
+ "loss": 1.5294,
+ "step": 1242
+ },
+ {
+ "epoch": 1.2433496921009033,
+ "grad_norm": 0.6459099054336548,
+ "learning_rate": 0.00012646363426578505,
+ "loss": 1.4404,
+ "step": 1243
+ },
+ {
+ "epoch": 1.2443499734300272,
+ "grad_norm": 0.762955904006958,
+ "learning_rate": 0.0001263625323417343,
+ "loss": 1.6486,
+ "step": 1244
+ },
+ {
+ "epoch": 1.245350254759151,
+ "grad_norm": 0.6792619228363037,
+ "learning_rate": 0.0001262614014500282,
+ "loss": 1.3946,
+ "step": 1245
+ },
+ {
+ "epoch": 1.2463505360882747,
+ "grad_norm": 0.7938026189804077,
+ "learning_rate": 0.00012616024170179126,
+ "loss": 1.5163,
+ "step": 1246
+ },
+ {
+ "epoch": 1.2473508174173986,
+ "grad_norm": 0.6673023104667664,
+ "learning_rate": 0.00012605905320817976,
+ "loss": 1.5599,
+ "step": 1247
+ },
+ {
+ "epoch": 1.2483510987465225,
+ "grad_norm": 0.6543686389923096,
+ "learning_rate": 0.00012595783608038155,
+ "loss": 1.3298,
+ "step": 1248
+ },
+ {
+ "epoch": 1.2493513800756464,
+ "grad_norm": 0.6570146083831787,
+ "learning_rate": 0.00012585659042961596,
+ "loss": 1.2528,
+ "step": 1249
+ },
+ {
+ "epoch": 1.25035166140477,
+ "grad_norm": 0.6886934638023376,
+ "learning_rate": 0.00012575531636713368,
+ "loss": 1.4567,
+ "step": 1250
+ },
+ {
+ "epoch": 1.251351942733894,
+ "grad_norm": 0.660229504108429,
+ "learning_rate": 0.00012565401400421651,
+ "loss": 1.3333,
+ "step": 1251
+ },
+ {
+ "epoch": 1.2523522240630176,
+ "grad_norm": 0.7553595900535583,
+ "learning_rate": 0.0001255526834521775,
+ "loss": 1.4152,
+ "step": 1252
+ },
+ {
+ "epoch": 1.2533525053921415,
+ "grad_norm": 0.6283079385757446,
+ "learning_rate": 0.00012545132482236055,
+ "loss": 1.3201,
+ "step": 1253
+ },
+ {
+ "epoch": 1.2543527867212654,
+ "grad_norm": 0.6704882383346558,
+ "learning_rate": 0.0001253499382261405,
+ "loss": 1.3711,
+ "step": 1254
+ },
+ {
+ "epoch": 1.2553530680503893,
+ "grad_norm": 0.7282963991165161,
+ "learning_rate": 0.00012524852377492285,
+ "loss": 1.5492,
+ "step": 1255
+ },
+ {
+ "epoch": 1.256353349379513,
+ "grad_norm": 0.7712034583091736,
+ "learning_rate": 0.00012514708158014378,
+ "loss": 1.4299,
+ "step": 1256
+ },
+ {
+ "epoch": 1.2573536307086368,
+ "grad_norm": 0.6231324076652527,
+ "learning_rate": 0.00012504561175326985,
+ "loss": 1.3492,
+ "step": 1257
+ },
+ {
+ "epoch": 1.2583539120377607,
+ "grad_norm": 0.7122861742973328,
+ "learning_rate": 0.00012494411440579814,
+ "loss": 1.4583,
+ "step": 1258
+ },
+ {
+ "epoch": 1.2593541933668844,
+ "grad_norm": 0.7797596454620361,
+ "learning_rate": 0.0001248425896492558,
+ "loss": 1.4185,
+ "step": 1259
+ },
+ {
+ "epoch": 1.2603544746960083,
+ "grad_norm": 0.7058592438697815,
+ "learning_rate": 0.00012474103759520027,
+ "loss": 1.7918,
+ "step": 1260
+ },
+ {
+ "epoch": 1.2613547560251321,
+ "grad_norm": 0.6629828214645386,
+ "learning_rate": 0.00012463945835521878,
+ "loss": 1.2547,
+ "step": 1261
+ },
+ {
+ "epoch": 1.2623550373542558,
+ "grad_norm": 0.6975031495094299,
+ "learning_rate": 0.0001245378520409286,
+ "loss": 1.6256,
+ "step": 1262
+ },
+ {
+ "epoch": 1.2633553186833797,
+ "grad_norm": 0.7939541935920715,
+ "learning_rate": 0.0001244362187639767,
+ "loss": 1.2817,
+ "step": 1263
+ },
+ {
+ "epoch": 1.2643556000125036,
+ "grad_norm": 0.7042918801307678,
+ "learning_rate": 0.00012433455863603967,
+ "loss": 1.4207,
+ "step": 1264
+ },
+ {
+ "epoch": 1.2653558813416272,
+ "grad_norm": 0.6327396631240845,
+ "learning_rate": 0.00012423287176882358,
+ "loss": 1.3927,
+ "step": 1265
+ },
+ {
+ "epoch": 1.2663561626707511,
+ "grad_norm": 0.5797626376152039,
+ "learning_rate": 0.00012413115827406392,
+ "loss": 1.2808,
+ "step": 1266
+ },
+ {
+ "epoch": 1.267356443999875,
+ "grad_norm": 0.6891800165176392,
+ "learning_rate": 0.00012402941826352546,
+ "loss": 1.2385,
+ "step": 1267
+ },
+ {
+ "epoch": 1.268356725328999,
+ "grad_norm": 0.6648104190826416,
+ "learning_rate": 0.00012392765184900202,
+ "loss": 1.5402,
+ "step": 1268
+ },
+ {
+ "epoch": 1.2693570066581226,
+ "grad_norm": 0.7340229749679565,
+ "learning_rate": 0.0001238258591423165,
+ "loss": 1.2926,
+ "step": 1269
+ },
+ {
+ "epoch": 1.2703572879872465,
+ "grad_norm": 0.670605480670929,
+ "learning_rate": 0.00012372404025532072,
+ "loss": 1.3863,
+ "step": 1270
+ },
+ {
+ "epoch": 1.2713575693163701,
+ "grad_norm": 0.7016957402229309,
+ "learning_rate": 0.00012362219529989514,
+ "loss": 1.7362,
+ "step": 1271
+ },
+ {
+ "epoch": 1.272357850645494,
+ "grad_norm": 0.6318536400794983,
+ "learning_rate": 0.00012352032438794902,
+ "loss": 1.4278,
+ "step": 1272
+ },
+ {
+ "epoch": 1.273358131974618,
+ "grad_norm": 0.5807138681411743,
+ "learning_rate": 0.00012341842763142005,
+ "loss": 1.4762,
+ "step": 1273
+ },
+ {
+ "epoch": 1.2743584133037418,
+ "grad_norm": 0.6634588241577148,
+ "learning_rate": 0.00012331650514227425,
+ "loss": 1.6209,
+ "step": 1274
+ },
+ {
+ "epoch": 1.2753586946328654,
+ "grad_norm": 0.709530770778656,
+ "learning_rate": 0.00012321455703250616,
+ "loss": 1.5209,
+ "step": 1275
+ },
+ {
+ "epoch": 1.2763589759619893,
+ "grad_norm": 0.678584098815918,
+ "learning_rate": 0.00012311258341413822,
+ "loss": 1.4247,
+ "step": 1276
+ },
+ {
+ "epoch": 1.277359257291113,
+ "grad_norm": 0.6134077310562134,
+ "learning_rate": 0.00012301058439922102,
+ "loss": 1.3792,
+ "step": 1277
+ },
+ {
+ "epoch": 1.2783595386202369,
+ "grad_norm": 0.694976806640625,
+ "learning_rate": 0.000122908560099833,
+ "loss": 1.5957,
+ "step": 1278
+ },
+ {
+ "epoch": 1.2793598199493608,
+ "grad_norm": 0.835444986820221,
+ "learning_rate": 0.00012280651062808047,
+ "loss": 1.4917,
+ "step": 1279
+ },
+ {
+ "epoch": 1.2803601012784847,
+ "grad_norm": 0.6491605043411255,
+ "learning_rate": 0.00012270443609609729,
+ "loss": 1.4417,
+ "step": 1280
+ },
+ {
+ "epoch": 1.2813603826076083,
+ "grad_norm": 0.6651148200035095,
+ "learning_rate": 0.0001226023366160449,
+ "loss": 1.2857,
+ "step": 1281
+ },
+ {
+ "epoch": 1.2823606639367322,
+ "grad_norm": 0.6736195683479309,
+ "learning_rate": 0.00012250021230011225,
+ "loss": 1.3431,
+ "step": 1282
+ },
+ {
+ "epoch": 1.283360945265856,
+ "grad_norm": 0.7162345051765442,
+ "learning_rate": 0.00012239806326051539,
+ "loss": 1.4009,
+ "step": 1283
+ },
+ {
+ "epoch": 1.2843612265949798,
+ "grad_norm": 0.8337399363517761,
+ "learning_rate": 0.00012229588960949771,
+ "loss": 1.5303,
+ "step": 1284
+ },
+ {
+ "epoch": 1.2853615079241036,
+ "grad_norm": 0.6648454666137695,
+ "learning_rate": 0.00012219369145932959,
+ "loss": 1.7088,
+ "step": 1285
+ },
+ {
+ "epoch": 1.2863617892532275,
+ "grad_norm": 0.6891435384750366,
+ "learning_rate": 0.00012209146892230822,
+ "loss": 1.4053,
+ "step": 1286
+ },
+ {
+ "epoch": 1.2873620705823514,
+ "grad_norm": 0.6659008860588074,
+ "learning_rate": 0.00012198922211075778,
+ "loss": 1.3959,
+ "step": 1287
+ },
+ {
+ "epoch": 1.288362351911475,
+ "grad_norm": 0.6926385164260864,
+ "learning_rate": 0.00012188695113702896,
+ "loss": 1.536,
+ "step": 1288
+ },
+ {
+ "epoch": 1.289362633240599,
+ "grad_norm": 0.6584843397140503,
+ "learning_rate": 0.00012178465611349911,
+ "loss": 1.5099,
+ "step": 1289
+ },
+ {
+ "epoch": 1.2903629145697226,
+ "grad_norm": 0.7430850267410278,
+ "learning_rate": 0.00012168233715257194,
+ "loss": 1.3367,
+ "step": 1290
+ },
+ {
+ "epoch": 1.2913631958988465,
+ "grad_norm": 0.8379004597663879,
+ "learning_rate": 0.00012157999436667747,
+ "loss": 1.3542,
+ "step": 1291
+ },
+ {
+ "epoch": 1.2923634772279704,
+ "grad_norm": 0.7217230796813965,
+ "learning_rate": 0.00012147762786827193,
+ "loss": 1.4465,
+ "step": 1292
+ },
+ {
+ "epoch": 1.2933637585570943,
+ "grad_norm": 0.7268504500389099,
+ "learning_rate": 0.00012137523776983757,
+ "loss": 1.2616,
+ "step": 1293
+ },
+ {
+ "epoch": 1.294364039886218,
+ "grad_norm": 0.7402834296226501,
+ "learning_rate": 0.00012127282418388264,
+ "loss": 1.271,
+ "step": 1294
+ },
+ {
+ "epoch": 1.2953643212153418,
+ "grad_norm": 0.6314610242843628,
+ "learning_rate": 0.0001211703872229411,
+ "loss": 1.3718,
+ "step": 1295
+ },
+ {
+ "epoch": 1.2963646025444655,
+ "grad_norm": 0.6106632947921753,
+ "learning_rate": 0.00012106792699957263,
+ "loss": 1.6404,
+ "step": 1296
+ },
+ {
+ "epoch": 1.2973648838735894,
+ "grad_norm": 0.6568691730499268,
+ "learning_rate": 0.00012096544362636255,
+ "loss": 1.3559,
+ "step": 1297
+ },
+ {
+ "epoch": 1.2983651652027133,
+ "grad_norm": 0.700645387172699,
+ "learning_rate": 0.00012086293721592152,
+ "loss": 1.5258,
+ "step": 1298
+ },
+ {
+ "epoch": 1.2993654465318372,
+ "grad_norm": 1.0722559690475464,
+ "learning_rate": 0.00012076040788088554,
+ "loss": 1.4921,
+ "step": 1299
+ },
+ {
+ "epoch": 1.3003657278609608,
+ "grad_norm": 0.6164102554321289,
+ "learning_rate": 0.00012065785573391581,
+ "loss": 1.3766,
+ "step": 1300
+ },
+ {
+ "epoch": 1.3013660091900847,
+ "grad_norm": 0.7066829204559326,
+ "learning_rate": 0.00012055528088769861,
+ "loss": 1.4322,
+ "step": 1301
+ },
+ {
+ "epoch": 1.3023662905192086,
+ "grad_norm": 0.6311153769493103,
+ "learning_rate": 0.00012045268345494511,
+ "loss": 1.2958,
+ "step": 1302
+ },
+ {
+ "epoch": 1.3033665718483323,
+ "grad_norm": 0.6254247426986694,
+ "learning_rate": 0.00012035006354839133,
+ "loss": 1.3125,
+ "step": 1303
+ },
+ {
+ "epoch": 1.3043668531774562,
+ "grad_norm": 0.7812719941139221,
+ "learning_rate": 0.00012024742128079805,
+ "loss": 1.5446,
+ "step": 1304
+ },
+ {
+ "epoch": 1.30536713450658,
+ "grad_norm": 0.6067742109298706,
+ "learning_rate": 0.00012014475676495052,
+ "loss": 1.2821,
+ "step": 1305
+ },
+ {
+ "epoch": 1.306367415835704,
+ "grad_norm": 0.6812780499458313,
+ "learning_rate": 0.00012004207011365849,
+ "loss": 1.2988,
+ "step": 1306
+ },
+ {
+ "epoch": 1.3073676971648276,
+ "grad_norm": 0.6978224515914917,
+ "learning_rate": 0.00011993936143975599,
+ "loss": 1.3677,
+ "step": 1307
+ },
+ {
+ "epoch": 1.3083679784939515,
+ "grad_norm": 0.660102903842926,
+ "learning_rate": 0.00011983663085610131,
+ "loss": 1.3688,
+ "step": 1308
+ },
+ {
+ "epoch": 1.3093682598230751,
+ "grad_norm": 0.706007182598114,
+ "learning_rate": 0.00011973387847557676,
+ "loss": 1.2638,
+ "step": 1309
+ },
+ {
+ "epoch": 1.310368541152199,
+ "grad_norm": 0.6711030602455139,
+ "learning_rate": 0.00011963110441108863,
+ "loss": 1.2569,
+ "step": 1310
+ },
+ {
+ "epoch": 1.311368822481323,
+ "grad_norm": 0.6825215816497803,
+ "learning_rate": 0.000119528308775567,
+ "loss": 1.3912,
+ "step": 1311
+ },
+ {
+ "epoch": 1.3123691038104468,
+ "grad_norm": 0.725979745388031,
+ "learning_rate": 0.00011942549168196575,
+ "loss": 1.331,
+ "step": 1312
+ },
+ {
+ "epoch": 1.3133693851395705,
+ "grad_norm": 0.6699597835540771,
+ "learning_rate": 0.00011932265324326221,
+ "loss": 1.4171,
+ "step": 1313
+ },
+ {
+ "epoch": 1.3143696664686944,
+ "grad_norm": 0.6116276383399963,
+ "learning_rate": 0.0001192197935724573,
+ "loss": 1.3333,
+ "step": 1314
+ },
+ {
+ "epoch": 1.315369947797818,
+ "grad_norm": 0.6644623875617981,
+ "learning_rate": 0.00011911691278257511,
+ "loss": 1.5515,
+ "step": 1315
+ },
+ {
+ "epoch": 1.316370229126942,
+ "grad_norm": 0.6456226110458374,
+ "learning_rate": 0.0001190140109866631,
+ "loss": 1.2691,
+ "step": 1316
+ },
+ {
+ "epoch": 1.3173705104560658,
+ "grad_norm": 0.6665071249008179,
+ "learning_rate": 0.00011891108829779165,
+ "loss": 1.3782,
+ "step": 1317
+ },
+ {
+ "epoch": 1.3183707917851897,
+ "grad_norm": 0.7108166813850403,
+ "learning_rate": 0.00011880814482905422,
+ "loss": 1.3122,
+ "step": 1318
+ },
+ {
+ "epoch": 1.3193710731143133,
+ "grad_norm": 0.7184962630271912,
+ "learning_rate": 0.00011870518069356709,
+ "loss": 1.3624,
+ "step": 1319
+ },
+ {
+ "epoch": 1.3203713544434372,
+ "grad_norm": 0.6516618132591248,
+ "learning_rate": 0.0001186021960044692,
+ "loss": 1.5654,
+ "step": 1320
+ },
+ {
+ "epoch": 1.3213716357725611,
+ "grad_norm": 0.6597375869750977,
+ "learning_rate": 0.00011849919087492211,
+ "loss": 1.4765,
+ "step": 1321
+ },
+ {
+ "epoch": 1.3223719171016848,
+ "grad_norm": 0.8731528520584106,
+ "learning_rate": 0.00011839616541810983,
+ "loss": 1.3876,
+ "step": 1322
+ },
+ {
+ "epoch": 1.3233721984308087,
+ "grad_norm": 0.6694337725639343,
+ "learning_rate": 0.00011829311974723867,
+ "loss": 1.327,
+ "step": 1323
+ },
+ {
+ "epoch": 1.3243724797599326,
+ "grad_norm": 0.7454182505607605,
+ "learning_rate": 0.00011819005397553723,
+ "loss": 1.4202,
+ "step": 1324
+ },
+ {
+ "epoch": 1.3253727610890562,
+ "grad_norm": 0.6783546209335327,
+ "learning_rate": 0.00011808696821625613,
+ "loss": 1.3966,
+ "step": 1325
+ },
+ {
+ "epoch": 1.32637304241818,
+ "grad_norm": 0.7126333713531494,
+ "learning_rate": 0.000117983862582668,
+ "loss": 1.3896,
+ "step": 1326
+ },
+ {
+ "epoch": 1.327373323747304,
+ "grad_norm": 0.6765009164810181,
+ "learning_rate": 0.00011788073718806725,
+ "loss": 1.3678,
+ "step": 1327
+ },
+ {
+ "epoch": 1.3283736050764277,
+ "grad_norm": 0.6767436861991882,
+ "learning_rate": 0.00011777759214577006,
+ "loss": 1.497,
+ "step": 1328
+ },
+ {
+ "epoch": 1.3293738864055515,
+ "grad_norm": 0.7058733105659485,
+ "learning_rate": 0.00011767442756911417,
+ "loss": 1.4992,
+ "step": 1329
+ },
+ {
+ "epoch": 1.3303741677346754,
+ "grad_norm": 0.6815193295478821,
+ "learning_rate": 0.00011757124357145881,
+ "loss": 1.4952,
+ "step": 1330
+ },
+ {
+ "epoch": 1.3313744490637993,
+ "grad_norm": 0.6513908505439758,
+ "learning_rate": 0.00011746804026618452,
+ "loss": 1.5202,
+ "step": 1331
+ },
+ {
+ "epoch": 1.332374730392923,
+ "grad_norm": 0.6763479113578796,
+ "learning_rate": 0.00011736481776669306,
+ "loss": 1.4547,
+ "step": 1332
+ },
+ {
+ "epoch": 1.3333750117220469,
+ "grad_norm": 0.6361576914787292,
+ "learning_rate": 0.00011726157618640728,
+ "loss": 1.3231,
+ "step": 1333
+ },
+ {
+ "epoch": 1.3343752930511705,
+ "grad_norm": 0.7247117161750793,
+ "learning_rate": 0.00011715831563877104,
+ "loss": 1.5126,
+ "step": 1334
+ },
+ {
+ "epoch": 1.3353755743802944,
+ "grad_norm": 0.8347336649894714,
+ "learning_rate": 0.00011705503623724898,
+ "loss": 1.5669,
+ "step": 1335
+ },
+ {
+ "epoch": 1.3363758557094183,
+ "grad_norm": 0.6969489455223083,
+ "learning_rate": 0.00011695173809532652,
+ "loss": 1.3646,
+ "step": 1336
+ },
+ {
+ "epoch": 1.3373761370385422,
+ "grad_norm": 0.6771052479743958,
+ "learning_rate": 0.00011684842132650957,
+ "loss": 1.6715,
+ "step": 1337
+ },
+ {
+ "epoch": 1.3383764183676659,
+ "grad_norm": 0.6224768161773682,
+ "learning_rate": 0.00011674508604432464,
+ "loss": 1.5712,
+ "step": 1338
+ },
+ {
+ "epoch": 1.3393766996967897,
+ "grad_norm": 0.6684542298316956,
+ "learning_rate": 0.00011664173236231848,
+ "loss": 1.5669,
+ "step": 1339
+ },
+ {
+ "epoch": 1.3403769810259134,
+ "grad_norm": 0.6805415153503418,
+ "learning_rate": 0.0001165383603940581,
+ "loss": 1.3648,
+ "step": 1340
+ },
+ {
+ "epoch": 1.3413772623550373,
+ "grad_norm": 0.7991671562194824,
+ "learning_rate": 0.00011643497025313061,
+ "loss": 1.3703,
+ "step": 1341
+ },
+ {
+ "epoch": 1.3423775436841612,
+ "grad_norm": 0.7023015022277832,
+ "learning_rate": 0.00011633156205314309,
+ "loss": 1.366,
+ "step": 1342
+ },
+ {
+ "epoch": 1.343377825013285,
+ "grad_norm": 0.7017894387245178,
+ "learning_rate": 0.00011622813590772244,
+ "loss": 1.4816,
+ "step": 1343
+ },
+ {
+ "epoch": 1.3443781063424087,
+ "grad_norm": 0.704626739025116,
+ "learning_rate": 0.00011612469193051525,
+ "loss": 1.2841,
+ "step": 1344
+ },
+ {
+ "epoch": 1.3453783876715326,
+ "grad_norm": 0.707372784614563,
+ "learning_rate": 0.00011602123023518779,
+ "loss": 1.3226,
+ "step": 1345
+ },
+ {
+ "epoch": 1.3463786690006565,
+ "grad_norm": 0.6367921233177185,
+ "learning_rate": 0.00011591775093542572,
+ "loss": 1.3292,
+ "step": 1346
+ },
+ {
+ "epoch": 1.3473789503297802,
+ "grad_norm": 0.7131246328353882,
+ "learning_rate": 0.0001158142541449341,
+ "loss": 1.3537,
+ "step": 1347
+ },
+ {
+ "epoch": 1.348379231658904,
+ "grad_norm": 0.6140089631080627,
+ "learning_rate": 0.00011571073997743716,
+ "loss": 1.4316,
+ "step": 1348
+ },
+ {
+ "epoch": 1.349379512988028,
+ "grad_norm": 0.6347403526306152,
+ "learning_rate": 0.0001156072085466783,
+ "loss": 1.4214,
+ "step": 1349
+ },
+ {
+ "epoch": 1.3503797943171518,
+ "grad_norm": 0.5540759563446045,
+ "learning_rate": 0.00011550365996641979,
+ "loss": 1.25,
+ "step": 1350
+ },
+ {
+ "epoch": 1.3513800756462755,
+ "grad_norm": 0.6721670031547546,
+ "learning_rate": 0.00011540009435044281,
+ "loss": 1.381,
+ "step": 1351
+ },
+ {
+ "epoch": 1.3523803569753994,
+ "grad_norm": 0.5901767015457153,
+ "learning_rate": 0.00011529651181254723,
+ "loss": 1.5127,
+ "step": 1352
+ },
+ {
+ "epoch": 1.353380638304523,
+ "grad_norm": 0.6438884139060974,
+ "learning_rate": 0.0001151929124665516,
+ "loss": 1.3348,
+ "step": 1353
+ },
+ {
+ "epoch": 1.354380919633647,
+ "grad_norm": 0.9631819725036621,
+ "learning_rate": 0.00011508929642629274,
+ "loss": 1.5822,
+ "step": 1354
+ },
+ {
+ "epoch": 1.3553812009627708,
+ "grad_norm": 0.6426034569740295,
+ "learning_rate": 0.00011498566380562601,
+ "loss": 1.3487,
+ "step": 1355
+ },
+ {
+ "epoch": 1.3563814822918947,
+ "grad_norm": 0.682264506816864,
+ "learning_rate": 0.0001148820147184249,
+ "loss": 1.3331,
+ "step": 1356
+ },
+ {
+ "epoch": 1.3573817636210184,
+ "grad_norm": 0.746907114982605,
+ "learning_rate": 0.00011477834927858104,
+ "loss": 1.38,
+ "step": 1357
+ },
+ {
+ "epoch": 1.3583820449501423,
+ "grad_norm": 0.7521925568580627,
+ "learning_rate": 0.00011467466760000399,
+ "loss": 1.3798,
+ "step": 1358
+ },
+ {
+ "epoch": 1.359382326279266,
+ "grad_norm": 0.7887171506881714,
+ "learning_rate": 0.00011457096979662114,
+ "loss": 1.3306,
+ "step": 1359
+ },
+ {
+ "epoch": 1.3603826076083898,
+ "grad_norm": 0.7623118162155151,
+ "learning_rate": 0.00011446725598237767,
+ "loss": 1.4307,
+ "step": 1360
+ },
+ {
+ "epoch": 1.3613828889375137,
+ "grad_norm": 0.6182076930999756,
+ "learning_rate": 0.00011436352627123623,
+ "loss": 1.3776,
+ "step": 1361
+ },
+ {
+ "epoch": 1.3623831702666376,
+ "grad_norm": 0.6136983633041382,
+ "learning_rate": 0.00011425978077717709,
+ "loss": 1.4815,
+ "step": 1362
+ },
+ {
+ "epoch": 1.3633834515957612,
+ "grad_norm": 0.6165998578071594,
+ "learning_rate": 0.00011415601961419775,
+ "loss": 1.4252,
+ "step": 1363
+ },
+ {
+ "epoch": 1.3643837329248851,
+ "grad_norm": 0.7338213324546814,
+ "learning_rate": 0.00011405224289631295,
+ "loss": 1.4426,
+ "step": 1364
+ },
+ {
+ "epoch": 1.365384014254009,
+ "grad_norm": 0.8061873316764832,
+ "learning_rate": 0.00011394845073755455,
+ "loss": 1.5366,
+ "step": 1365
+ },
+ {
+ "epoch": 1.3663842955831327,
+ "grad_norm": 0.8496206402778625,
+ "learning_rate": 0.0001138446432519714,
+ "loss": 1.3305,
+ "step": 1366
+ },
+ {
+ "epoch": 1.3673845769122566,
+ "grad_norm": 0.7370564341545105,
+ "learning_rate": 0.00011374082055362909,
+ "loss": 1.5069,
+ "step": 1367
+ },
+ {
+ "epoch": 1.3683848582413805,
+ "grad_norm": 0.630095362663269,
+ "learning_rate": 0.00011363698275661001,
+ "loss": 1.2846,
+ "step": 1368
+ },
+ {
+ "epoch": 1.3693851395705043,
+ "grad_norm": 0.7039877772331238,
+ "learning_rate": 0.00011353312997501313,
+ "loss": 1.4091,
+ "step": 1369
+ },
+ {
+ "epoch": 1.370385420899628,
+ "grad_norm": 0.7010135650634766,
+ "learning_rate": 0.00011342926232295386,
+ "loss": 1.4068,
+ "step": 1370
+ },
+ {
+ "epoch": 1.371385702228752,
+ "grad_norm": 0.6542472243309021,
+ "learning_rate": 0.00011332537991456398,
+ "loss": 1.3422,
+ "step": 1371
+ },
+ {
+ "epoch": 1.3723859835578756,
+ "grad_norm": 0.7767142057418823,
+ "learning_rate": 0.00011322148286399147,
+ "loss": 1.3156,
+ "step": 1372
+ },
+ {
+ "epoch": 1.3733862648869994,
+ "grad_norm": 0.7862635254859924,
+ "learning_rate": 0.0001131175712854004,
+ "loss": 1.3952,
+ "step": 1373
+ },
+ {
+ "epoch": 1.3743865462161233,
+ "grad_norm": 0.7444994449615479,
+ "learning_rate": 0.00011301364529297079,
+ "loss": 1.2304,
+ "step": 1374
+ },
+ {
+ "epoch": 1.3753868275452472,
+ "grad_norm": 0.6078478693962097,
+ "learning_rate": 0.0001129097050008985,
+ "loss": 1.4364,
+ "step": 1375
+ },
+ {
+ "epoch": 1.3763871088743709,
+ "grad_norm": 0.7011739015579224,
+ "learning_rate": 0.00011280575052339514,
+ "loss": 1.4455,
+ "step": 1376
+ },
+ {
+ "epoch": 1.3773873902034948,
+ "grad_norm": 0.7185930013656616,
+ "learning_rate": 0.00011270178197468789,
+ "loss": 1.349,
+ "step": 1377
+ },
+ {
+ "epoch": 1.3783876715326184,
+ "grad_norm": 0.6734675168991089,
+ "learning_rate": 0.00011259779946901934,
+ "loss": 1.4803,
+ "step": 1378
+ },
+ {
+ "epoch": 1.3793879528617423,
+ "grad_norm": 0.674439013004303,
+ "learning_rate": 0.0001124938031206475,
+ "loss": 1.5707,
+ "step": 1379
+ },
+ {
+ "epoch": 1.3803882341908662,
+ "grad_norm": 0.6843717694282532,
+ "learning_rate": 0.00011238979304384554,
+ "loss": 1.5508,
+ "step": 1380
+ },
+ {
+ "epoch": 1.38138851551999,
+ "grad_norm": 0.6178708672523499,
+ "learning_rate": 0.0001122857693529017,
+ "loss": 1.2827,
+ "step": 1381
+ },
+ {
+ "epoch": 1.3823887968491138,
+ "grad_norm": 0.721108078956604,
+ "learning_rate": 0.0001121817321621192,
+ "loss": 1.3768,
+ "step": 1382
+ },
+ {
+ "epoch": 1.3833890781782376,
+ "grad_norm": 0.6790196299552917,
+ "learning_rate": 0.00011207768158581613,
+ "loss": 1.508,
+ "step": 1383
+ },
+ {
+ "epoch": 1.3843893595073615,
+ "grad_norm": 0.6942607760429382,
+ "learning_rate": 0.00011197361773832525,
+ "loss": 1.2476,
+ "step": 1384
+ },
+ {
+ "epoch": 1.3853896408364852,
+ "grad_norm": 0.6791447997093201,
+ "learning_rate": 0.00011186954073399387,
+ "loss": 1.5537,
+ "step": 1385
+ },
+ {
+ "epoch": 1.386389922165609,
+ "grad_norm": 0.6842163801193237,
+ "learning_rate": 0.00011176545068718385,
+ "loss": 1.4336,
+ "step": 1386
+ },
+ {
+ "epoch": 1.387390203494733,
+ "grad_norm": 0.6922981142997742,
+ "learning_rate": 0.0001116613477122713,
+ "loss": 1.2496,
+ "step": 1387
+ },
+ {
+ "epoch": 1.3883904848238566,
+ "grad_norm": 0.7480785250663757,
+ "learning_rate": 0.00011155723192364658,
+ "loss": 1.5798,
+ "step": 1388
+ },
+ {
+ "epoch": 1.3893907661529805,
+ "grad_norm": 0.6371482610702515,
+ "learning_rate": 0.00011145310343571411,
+ "loss": 1.326,
+ "step": 1389
+ },
+ {
+ "epoch": 1.3903910474821044,
+ "grad_norm": 0.663810670375824,
+ "learning_rate": 0.00011134896236289224,
+ "loss": 1.3021,
+ "step": 1390
+ },
+ {
+ "epoch": 1.391391328811228,
+ "grad_norm": 0.6067004799842834,
+ "learning_rate": 0.0001112448088196132,
+ "loss": 1.5062,
+ "step": 1391
+ },
+ {
+ "epoch": 1.392391610140352,
+ "grad_norm": 0.6500088572502136,
+ "learning_rate": 0.00011114064292032282,
+ "loss": 1.3196,
+ "step": 1392
+ },
+ {
+ "epoch": 1.3933918914694758,
+ "grad_norm": 0.7117498517036438,
+ "learning_rate": 0.0001110364647794807,
+ "loss": 1.354,
+ "step": 1393
+ },
+ {
+ "epoch": 1.3943921727985997,
+ "grad_norm": 0.5792518854141235,
+ "learning_rate": 0.00011093227451155974,
+ "loss": 1.1684,
+ "step": 1394
+ },
+ {
+ "epoch": 1.3953924541277234,
+ "grad_norm": 0.6920313835144043,
+ "learning_rate": 0.0001108280722310462,
+ "loss": 1.5114,
+ "step": 1395
+ },
+ {
+ "epoch": 1.3963927354568473,
+ "grad_norm": 0.5932325720787048,
+ "learning_rate": 0.0001107238580524395,
+ "loss": 1.4519,
+ "step": 1396
+ },
+ {
+ "epoch": 1.397393016785971,
+ "grad_norm": 0.7241511940956116,
+ "learning_rate": 0.00011061963209025223,
+ "loss": 1.4131,
+ "step": 1397
+ },
+ {
+ "epoch": 1.3983932981150948,
+ "grad_norm": 0.6144927740097046,
+ "learning_rate": 0.00011051539445900983,
+ "loss": 1.4436,
+ "step": 1398
+ },
+ {
+ "epoch": 1.3993935794442187,
+ "grad_norm": 0.589124321937561,
+ "learning_rate": 0.00011041114527325065,
+ "loss": 1.4069,
+ "step": 1399
+ },
+ {
+ "epoch": 1.4003938607733426,
+ "grad_norm": 0.6695122122764587,
+ "learning_rate": 0.00011030688464752566,
+ "loss": 1.6472,
+ "step": 1400
+ },
+ {
+ "epoch": 1.4013941421024663,
+ "grad_norm": 0.6082026362419128,
+ "learning_rate": 0.00011020261269639842,
+ "loss": 1.3188,
+ "step": 1401
+ },
+ {
+ "epoch": 1.4023944234315902,
+ "grad_norm": 0.8926504254341125,
+ "learning_rate": 0.000110098329534445,
+ "loss": 1.4195,
+ "step": 1402
+ },
+ {
+ "epoch": 1.4033947047607138,
+ "grad_norm": 0.6825796365737915,
+ "learning_rate": 0.00010999403527625367,
+ "loss": 1.5214,
+ "step": 1403
+ },
+ {
+ "epoch": 1.4043949860898377,
+ "grad_norm": 0.6856653690338135,
+ "learning_rate": 0.00010988973003642499,
+ "loss": 1.4579,
+ "step": 1404
+ },
+ {
+ "epoch": 1.4053952674189616,
+ "grad_norm": 0.6756052374839783,
+ "learning_rate": 0.00010978541392957156,
+ "loss": 1.331,
+ "step": 1405
+ },
+ {
+ "epoch": 1.4063955487480855,
+ "grad_norm": 0.6562577486038208,
+ "learning_rate": 0.00010968108707031792,
+ "loss": 1.2906,
+ "step": 1406
+ },
+ {
+ "epoch": 1.4073958300772091,
+ "grad_norm": 0.7208355069160461,
+ "learning_rate": 0.00010957674957330042,
+ "loss": 1.5163,
+ "step": 1407
+ },
+ {
+ "epoch": 1.408396111406333,
+ "grad_norm": 0.6576356291770935,
+ "learning_rate": 0.00010947240155316707,
+ "loss": 1.3934,
+ "step": 1408
+ },
+ {
+ "epoch": 1.409396392735457,
+ "grad_norm": 0.6244364976882935,
+ "learning_rate": 0.00010936804312457749,
+ "loss": 1.3903,
+ "step": 1409
+ },
+ {
+ "epoch": 1.4103966740645806,
+ "grad_norm": 0.5791237354278564,
+ "learning_rate": 0.00010926367440220276,
+ "loss": 1.2708,
+ "step": 1410
+ },
+ {
+ "epoch": 1.4113969553937045,
+ "grad_norm": 0.7043458819389343,
+ "learning_rate": 0.00010915929550072517,
+ "loss": 1.2446,
+ "step": 1411
+ },
+ {
+ "epoch": 1.4123972367228284,
+ "grad_norm": 0.5865835547447205,
+ "learning_rate": 0.00010905490653483827,
+ "loss": 1.657,
+ "step": 1412
+ },
+ {
+ "epoch": 1.4133975180519522,
+ "grad_norm": 0.6033587455749512,
+ "learning_rate": 0.00010895050761924668,
+ "loss": 1.4481,
+ "step": 1413
+ },
+ {
+ "epoch": 1.414397799381076,
+ "grad_norm": 0.7100054025650024,
+ "learning_rate": 0.00010884609886866588,
+ "loss": 1.5413,
+ "step": 1414
+ },
+ {
+ "epoch": 1.4153980807101998,
+ "grad_norm": 0.8067667484283447,
+ "learning_rate": 0.00010874168039782227,
+ "loss": 1.4327,
+ "step": 1415
+ },
+ {
+ "epoch": 1.4163983620393235,
+ "grad_norm": 0.7058808207511902,
+ "learning_rate": 0.00010863725232145286,
+ "loss": 1.534,
+ "step": 1416
+ },
+ {
+ "epoch": 1.4173986433684473,
+ "grad_norm": 0.5253664255142212,
+ "learning_rate": 0.00010853281475430517,
+ "loss": 1.0816,
+ "step": 1417
+ },
+ {
+ "epoch": 1.4183989246975712,
+ "grad_norm": 0.6874144673347473,
+ "learning_rate": 0.0001084283678111372,
+ "loss": 1.4386,
+ "step": 1418
+ },
+ {
+ "epoch": 1.4193992060266951,
+ "grad_norm": 0.74046790599823,
+ "learning_rate": 0.00010832391160671729,
+ "loss": 1.3393,
+ "step": 1419
+ },
+ {
+ "epoch": 1.4203994873558188,
+ "grad_norm": 0.6461816430091858,
+ "learning_rate": 0.00010821944625582392,
+ "loss": 1.5251,
+ "step": 1420
+ },
+ {
+ "epoch": 1.4213997686849427,
+ "grad_norm": 0.6058275699615479,
+ "learning_rate": 0.00010811497187324555,
+ "loss": 1.1555,
+ "step": 1421
+ },
+ {
+ "epoch": 1.4224000500140663,
+ "grad_norm": 0.6121847033500671,
+ "learning_rate": 0.00010801048857378071,
+ "loss": 1.248,
+ "step": 1422
+ },
+ {
+ "epoch": 1.4234003313431902,
+ "grad_norm": 0.5949802398681641,
+ "learning_rate": 0.00010790599647223763,
+ "loss": 1.5056,
+ "step": 1423
+ },
+ {
+ "epoch": 1.424400612672314,
+ "grad_norm": 0.6441097259521484,
+ "learning_rate": 0.0001078014956834342,
+ "loss": 1.5484,
+ "step": 1424
+ },
+ {
+ "epoch": 1.425400894001438,
+ "grad_norm": 0.686570942401886,
+ "learning_rate": 0.00010769698632219794,
+ "loss": 1.631,
+ "step": 1425
+ },
+ {
+ "epoch": 1.4264011753305617,
+ "grad_norm": 0.675699770450592,
+ "learning_rate": 0.00010759246850336572,
+ "loss": 1.4044,
+ "step": 1426
+ },
+ {
+ "epoch": 1.4274014566596855,
+ "grad_norm": 0.7777390480041504,
+ "learning_rate": 0.0001074879423417837,
+ "loss": 1.6326,
+ "step": 1427
+ },
+ {
+ "epoch": 1.4284017379888094,
+ "grad_norm": 0.6218550205230713,
+ "learning_rate": 0.00010738340795230721,
+ "loss": 1.5341,
+ "step": 1428
+ },
+ {
+ "epoch": 1.429402019317933,
+ "grad_norm": 0.7164304256439209,
+ "learning_rate": 0.00010727886544980068,
+ "loss": 1.5888,
+ "step": 1429
+ },
+ {
+ "epoch": 1.430402300647057,
+ "grad_norm": 0.6924182772636414,
+ "learning_rate": 0.00010717431494913741,
+ "loss": 1.2542,
+ "step": 1430
+ },
+ {
+ "epoch": 1.4314025819761809,
+ "grad_norm": 0.6145774126052856,
+ "learning_rate": 0.00010706975656519946,
+ "loss": 1.4038,
+ "step": 1431
+ },
+ {
+ "epoch": 1.4324028633053048,
+ "grad_norm": 0.5560014843940735,
+ "learning_rate": 0.00010696519041287765,
+ "loss": 1.2659,
+ "step": 1432
+ },
+ {
+ "epoch": 1.4334031446344284,
+ "grad_norm": 0.6854751110076904,
+ "learning_rate": 0.0001068606166070712,
+ "loss": 1.4407,
+ "step": 1433
+ },
+ {
+ "epoch": 1.4344034259635523,
+ "grad_norm": 0.6936755180358887,
+ "learning_rate": 0.00010675603526268785,
+ "loss": 1.4219,
+ "step": 1434
+ },
+ {
+ "epoch": 1.435403707292676,
+ "grad_norm": 0.8174700736999512,
+ "learning_rate": 0.00010665144649464356,
+ "loss": 1.397,
+ "step": 1435
+ },
+ {
+ "epoch": 1.4364039886217999,
+ "grad_norm": 0.7231045365333557,
+ "learning_rate": 0.00010654685041786249,
+ "loss": 1.4558,
+ "step": 1436
+ },
+ {
+ "epoch": 1.4374042699509237,
+ "grad_norm": 0.6431431174278259,
+ "learning_rate": 0.00010644224714727681,
+ "loss": 1.3522,
+ "step": 1437
+ },
+ {
+ "epoch": 1.4384045512800476,
+ "grad_norm": 0.7025414109230042,
+ "learning_rate": 0.0001063376367978266,
+ "loss": 1.2723,
+ "step": 1438
+ },
+ {
+ "epoch": 1.4394048326091713,
+ "grad_norm": 0.6382789611816406,
+ "learning_rate": 0.00010623301948445971,
+ "loss": 1.4065,
+ "step": 1439
+ },
+ {
+ "epoch": 1.4404051139382952,
+ "grad_norm": 0.7055328488349915,
+ "learning_rate": 0.00010612839532213164,
+ "loss": 1.6186,
+ "step": 1440
+ },
+ {
+ "epoch": 1.4414053952674188,
+ "grad_norm": 0.6769623160362244,
+ "learning_rate": 0.00010602376442580544,
+ "loss": 1.49,
+ "step": 1441
+ },
+ {
+ "epoch": 1.4424056765965427,
+ "grad_norm": 0.6875097751617432,
+ "learning_rate": 0.00010591912691045152,
+ "loss": 1.3063,
+ "step": 1442
+ },
+ {
+ "epoch": 1.4434059579256666,
+ "grad_norm": 0.7747283577919006,
+ "learning_rate": 0.00010581448289104758,
+ "loss": 1.67,
+ "step": 1443
+ },
+ {
+ "epoch": 1.4444062392547905,
+ "grad_norm": 0.7236614227294922,
+ "learning_rate": 0.00010570983248257853,
+ "loss": 1.4703,
+ "step": 1444
+ },
+ {
+ "epoch": 1.4454065205839142,
+ "grad_norm": 0.7141956686973572,
+ "learning_rate": 0.00010560517580003617,
+ "loss": 1.5828,
+ "step": 1445
+ },
+ {
+ "epoch": 1.446406801913038,
+ "grad_norm": 0.679790198802948,
+ "learning_rate": 0.00010550051295841931,
+ "loss": 1.4706,
+ "step": 1446
+ },
+ {
+ "epoch": 1.447407083242162,
+ "grad_norm": 0.7930448651313782,
+ "learning_rate": 0.00010539584407273349,
+ "loss": 1.5388,
+ "step": 1447
+ },
+ {
+ "epoch": 1.4484073645712856,
+ "grad_norm": 0.7099994421005249,
+ "learning_rate": 0.00010529116925799085,
+ "loss": 1.408,
+ "step": 1448
+ },
+ {
+ "epoch": 1.4494076459004095,
+ "grad_norm": 0.6459046602249146,
+ "learning_rate": 0.00010518648862921012,
+ "loss": 1.3263,
+ "step": 1449
+ },
+ {
+ "epoch": 1.4504079272295334,
+ "grad_norm": 0.761446475982666,
+ "learning_rate": 0.00010508180230141635,
+ "loss": 1.3166,
+ "step": 1450
+ },
+ {
+ "epoch": 1.451408208558657,
+ "grad_norm": 0.6198295950889587,
+ "learning_rate": 0.00010497711038964086,
+ "loss": 1.3191,
+ "step": 1451
+ },
+ {
+ "epoch": 1.452408489887781,
+ "grad_norm": 0.6751184463500977,
+ "learning_rate": 0.0001048724130089212,
+ "loss": 1.1869,
+ "step": 1452
+ },
+ {
+ "epoch": 1.4534087712169048,
+ "grad_norm": 0.6853645443916321,
+ "learning_rate": 0.00010476771027430086,
+ "loss": 1.2579,
+ "step": 1453
+ },
+ {
+ "epoch": 1.4544090525460285,
+ "grad_norm": 0.6402629017829895,
+ "learning_rate": 0.00010466300230082911,
+ "loss": 1.3192,
+ "step": 1454
+ },
+ {
+ "epoch": 1.4554093338751524,
+ "grad_norm": 0.7862108945846558,
+ "learning_rate": 0.00010455828920356115,
+ "loss": 1.3619,
+ "step": 1455
+ },
+ {
+ "epoch": 1.4564096152042763,
+ "grad_norm": 0.7008057832717896,
+ "learning_rate": 0.00010445357109755771,
+ "loss": 1.3524,
+ "step": 1456
+ },
+ {
+ "epoch": 1.4574098965334001,
+ "grad_norm": 0.6477895379066467,
+ "learning_rate": 0.00010434884809788508,
+ "loss": 1.6172,
+ "step": 1457
+ },
+ {
+ "epoch": 1.4584101778625238,
+ "grad_norm": 0.6312345862388611,
+ "learning_rate": 0.00010424412031961484,
+ "loss": 1.2121,
+ "step": 1458
+ },
+ {
+ "epoch": 1.4594104591916477,
+ "grad_norm": 0.6922104358673096,
+ "learning_rate": 0.00010413938787782394,
+ "loss": 1.3243,
+ "step": 1459
+ },
+ {
+ "epoch": 1.4604107405207714,
+ "grad_norm": 0.670599102973938,
+ "learning_rate": 0.00010403465088759437,
+ "loss": 1.346,
+ "step": 1460
+ },
+ {
+ "epoch": 1.4614110218498952,
+ "grad_norm": 0.7351789474487305,
+ "learning_rate": 0.00010392990946401313,
+ "loss": 1.5343,
+ "step": 1461
+ },
+ {
+ "epoch": 1.4624113031790191,
+ "grad_norm": 0.6756190061569214,
+ "learning_rate": 0.00010382516372217215,
+ "loss": 1.4118,
+ "step": 1462
+ },
+ {
+ "epoch": 1.463411584508143,
+ "grad_norm": 0.6219121217727661,
+ "learning_rate": 0.000103720413777168,
+ "loss": 1.3602,
+ "step": 1463
+ },
+ {
+ "epoch": 1.4644118658372667,
+ "grad_norm": 0.6602663993835449,
+ "learning_rate": 0.00010361565974410192,
+ "loss": 1.4921,
+ "step": 1464
+ },
+ {
+ "epoch": 1.4654121471663906,
+ "grad_norm": 0.6103453636169434,
+ "learning_rate": 0.00010351090173807969,
+ "loss": 1.3259,
+ "step": 1465
+ },
+ {
+ "epoch": 1.4664124284955142,
+ "grad_norm": 0.745473325252533,
+ "learning_rate": 0.00010340613987421137,
+ "loss": 1.6036,
+ "step": 1466
+ },
+ {
+ "epoch": 1.4674127098246381,
+ "grad_norm": 0.6537976861000061,
+ "learning_rate": 0.00010330137426761135,
+ "loss": 1.3511,
+ "step": 1467
+ },
+ {
+ "epoch": 1.468412991153762,
+ "grad_norm": 0.7108463048934937,
+ "learning_rate": 0.00010319660503339808,
+ "loss": 1.4814,
+ "step": 1468
+ },
+ {
+ "epoch": 1.469413272482886,
+ "grad_norm": 0.6372820734977722,
+ "learning_rate": 0.00010309183228669397,
+ "loss": 1.468,
+ "step": 1469
+ },
+ {
+ "epoch": 1.4704135538120096,
+ "grad_norm": 0.6098326444625854,
+ "learning_rate": 0.00010298705614262532,
+ "loss": 1.6763,
+ "step": 1470
+ },
+ {
+ "epoch": 1.4714138351411334,
+ "grad_norm": 0.6385009288787842,
+ "learning_rate": 0.0001028822767163222,
+ "loss": 1.3058,
+ "step": 1471
+ },
+ {
+ "epoch": 1.4724141164702573,
+ "grad_norm": 0.6848032474517822,
+ "learning_rate": 0.00010277749412291824,
+ "loss": 1.4115,
+ "step": 1472
+ },
+ {
+ "epoch": 1.473414397799381,
+ "grad_norm": 0.7532572746276855,
+ "learning_rate": 0.00010267270847755048,
+ "loss": 1.4219,
+ "step": 1473
+ },
+ {
+ "epoch": 1.4744146791285049,
+ "grad_norm": 0.7336605787277222,
+ "learning_rate": 0.00010256791989535952,
+ "loss": 1.4092,
+ "step": 1474
+ },
+ {
+ "epoch": 1.4754149604576288,
+ "grad_norm": 0.6300507187843323,
+ "learning_rate": 0.00010246312849148899,
+ "loss": 1.2911,
+ "step": 1475
+ },
+ {
+ "epoch": 1.4764152417867527,
+ "grad_norm": 0.7114218473434448,
+ "learning_rate": 0.00010235833438108571,
+ "loss": 1.5038,
+ "step": 1476
+ },
+ {
+ "epoch": 1.4774155231158763,
+ "grad_norm": 0.7215398550033569,
+ "learning_rate": 0.00010225353767929944,
+ "loss": 1.4919,
+ "step": 1477
+ },
+ {
+ "epoch": 1.4784158044450002,
+ "grad_norm": 0.6189507246017456,
+ "learning_rate": 0.00010214873850128282,
+ "loss": 1.2092,
+ "step": 1478
+ },
+ {
+ "epoch": 1.4794160857741239,
+ "grad_norm": 0.5806283950805664,
+ "learning_rate": 0.00010204393696219117,
+ "loss": 1.2862,
+ "step": 1479
+ },
+ {
+ "epoch": 1.4804163671032478,
+ "grad_norm": 0.7068900465965271,
+ "learning_rate": 0.00010193913317718244,
+ "loss": 1.319,
+ "step": 1480
+ },
+ {
+ "epoch": 1.4814166484323716,
+ "grad_norm": 0.749792218208313,
+ "learning_rate": 0.00010183432726141706,
+ "loss": 1.3661,
+ "step": 1481
+ },
+ {
+ "epoch": 1.4824169297614955,
+ "grad_norm": 0.7314055562019348,
+ "learning_rate": 0.00010172951933005775,
+ "loss": 1.5695,
+ "step": 1482
+ },
+ {
+ "epoch": 1.4834172110906192,
+ "grad_norm": 0.6871920228004456,
+ "learning_rate": 0.00010162470949826948,
+ "loss": 1.3598,
+ "step": 1483
+ },
+ {
+ "epoch": 1.484417492419743,
+ "grad_norm": 0.7139384150505066,
+ "learning_rate": 0.0001015198978812193,
+ "loss": 1.4942,
+ "step": 1484
+ },
+ {
+ "epoch": 1.4854177737488667,
+ "grad_norm": 0.6459400653839111,
+ "learning_rate": 0.00010141508459407623,
+ "loss": 1.3971,
+ "step": 1485
+ },
+ {
+ "epoch": 1.4864180550779906,
+ "grad_norm": 0.8157202005386353,
+ "learning_rate": 0.0001013102697520111,
+ "loss": 1.4679,
+ "step": 1486
+ },
+ {
+ "epoch": 1.4874183364071145,
+ "grad_norm": 0.6978387832641602,
+ "learning_rate": 0.00010120545347019647,
+ "loss": 1.4547,
+ "step": 1487
+ },
+ {
+ "epoch": 1.4884186177362384,
+ "grad_norm": 0.641835629940033,
+ "learning_rate": 0.00010110063586380646,
+ "loss": 1.6611,
+ "step": 1488
+ },
+ {
+ "epoch": 1.489418899065362,
+ "grad_norm": 0.723709225654602,
+ "learning_rate": 0.00010099581704801673,
+ "loss": 1.3994,
+ "step": 1489
+ },
+ {
+ "epoch": 1.490419180394486,
+ "grad_norm": 0.6613619327545166,
+ "learning_rate": 0.00010089099713800414,
+ "loss": 1.5722,
+ "step": 1490
+ },
+ {
+ "epoch": 1.4914194617236098,
+ "grad_norm": 0.6406750082969666,
+ "learning_rate": 0.00010078617624894684,
+ "loss": 1.312,
+ "step": 1491
+ },
+ {
+ "epoch": 1.4924197430527335,
+ "grad_norm": 0.5216225385665894,
+ "learning_rate": 0.000100681354496024,
+ "loss": 1.3552,
+ "step": 1492
+ },
+ {
+ "epoch": 1.4934200243818574,
+ "grad_norm": 0.7549086809158325,
+ "learning_rate": 0.00010057653199441581,
+ "loss": 1.4344,
+ "step": 1493
+ },
+ {
+ "epoch": 1.4944203057109813,
+ "grad_norm": 0.6958007216453552,
+ "learning_rate": 0.00010047170885930324,
+ "loss": 1.254,
+ "step": 1494
+ },
+ {
+ "epoch": 1.4954205870401052,
+ "grad_norm": 0.706564724445343,
+ "learning_rate": 0.00010036688520586788,
+ "loss": 1.4854,
+ "step": 1495
+ },
+ {
+ "epoch": 1.4964208683692288,
+ "grad_norm": 0.6802704930305481,
+ "learning_rate": 0.00010026206114929209,
+ "loss": 1.4631,
+ "step": 1496
+ },
+ {
+ "epoch": 1.4974211496983527,
+ "grad_norm": 0.645449697971344,
+ "learning_rate": 0.00010015723680475846,
+ "loss": 1.5165,
+ "step": 1497
+ },
+ {
+ "epoch": 1.4984214310274764,
+ "grad_norm": 0.5729085206985474,
+ "learning_rate": 0.00010005241228745004,
+ "loss": 1.2683,
+ "step": 1498
+ },
+ {
+ "epoch": 1.4994217123566003,
+ "grad_norm": 0.6592169404029846,
+ "learning_rate": 9.994758771254997e-05,
+ "loss": 1.4722,
+ "step": 1499
+ },
+ {
+ "epoch": 1.5004219936857242,
+ "grad_norm": 0.6299737691879272,
+ "learning_rate": 9.984276319524154e-05,
+ "loss": 1.3664,
+ "step": 1500
+ },
+ {
+ "epoch": 1.501422275014848,
+ "grad_norm": 0.6960833072662354,
+ "learning_rate": 9.973793885070792e-05,
+ "loss": 1.3959,
+ "step": 1501
+ },
+ {
+ "epoch": 1.5024225563439717,
+ "grad_norm": 0.7174006700515747,
+ "learning_rate": 9.963311479413211e-05,
+ "loss": 1.5448,
+ "step": 1502
+ },
+ {
+ "epoch": 1.5034228376730956,
+ "grad_norm": 0.6396325826644897,
+ "learning_rate": 9.95282911406968e-05,
+ "loss": 1.5168,
+ "step": 1503
+ },
+ {
+ "epoch": 1.5044231190022193,
+ "grad_norm": 0.809868335723877,
+ "learning_rate": 9.942346800558421e-05,
+ "loss": 1.4467,
+ "step": 1504
+ },
+ {
+ "epoch": 1.5054234003313431,
+ "grad_norm": 0.6106623411178589,
+ "learning_rate": 9.931864550397601e-05,
+ "loss": 1.519,
+ "step": 1505
+ },
+ {
+ "epoch": 1.506423681660467,
+ "grad_norm": 0.7253887057304382,
+ "learning_rate": 9.921382375105318e-05,
+ "loss": 1.3442,
+ "step": 1506
+ },
+ {
+ "epoch": 1.507423962989591,
+ "grad_norm": 0.6945338249206543,
+ "learning_rate": 9.910900286199587e-05,
+ "loss": 1.5319,
+ "step": 1507
+ },
+ {
+ "epoch": 1.5084242443187148,
+ "grad_norm": 0.682486891746521,
+ "learning_rate": 9.900418295198328e-05,
+ "loss": 1.3558,
+ "step": 1508
+ },
+ {
+ "epoch": 1.5094245256478385,
+ "grad_norm": 0.6952700018882751,
+ "learning_rate": 9.889936413619356e-05,
+ "loss": 1.3194,
+ "step": 1509
+ },
+ {
+ "epoch": 1.5104248069769621,
+ "grad_norm": 0.6775678992271423,
+ "learning_rate": 9.879454652980358e-05,
+ "loss": 1.5936,
+ "step": 1510
+ },
+ {
+ "epoch": 1.511425088306086,
+ "grad_norm": 0.8723187446594238,
+ "learning_rate": 9.868973024798895e-05,
+ "loss": 1.4172,
+ "step": 1511
+ },
+ {
+ "epoch": 1.51242536963521,
+ "grad_norm": 0.8881109952926636,
+ "learning_rate": 9.858491540592382e-05,
+ "loss": 1.4405,
+ "step": 1512
+ },
+ {
+ "epoch": 1.5134256509643338,
+ "grad_norm": 0.6580207347869873,
+ "learning_rate": 9.848010211878074e-05,
+ "loss": 1.6241,
+ "step": 1513
+ },
+ {
+ "epoch": 1.5144259322934577,
+ "grad_norm": 0.6160255074501038,
+ "learning_rate": 9.837529050173052e-05,
+ "loss": 1.4222,
+ "step": 1514
+ },
+ {
+ "epoch": 1.5154262136225813,
+ "grad_norm": 0.7516399025917053,
+ "learning_rate": 9.827048066994225e-05,
+ "loss": 1.2495,
+ "step": 1515
+ },
+ {
+ "epoch": 1.516426494951705,
+ "grad_norm": 0.5907468795776367,
+ "learning_rate": 9.816567273858296e-05,
+ "loss": 1.1453,
+ "step": 1516
+ },
+ {
+ "epoch": 1.517426776280829,
+ "grad_norm": 0.7371746301651001,
+ "learning_rate": 9.806086682281758e-05,
+ "loss": 1.5006,
+ "step": 1517
+ },
+ {
+ "epoch": 1.5184270576099528,
+ "grad_norm": 0.6646453142166138,
+ "learning_rate": 9.795606303780885e-05,
+ "loss": 1.5237,
+ "step": 1518
+ },
+ {
+ "epoch": 1.5194273389390767,
+ "grad_norm": 0.7043606638908386,
+ "learning_rate": 9.785126149871722e-05,
+ "loss": 1.3339,
+ "step": 1519
+ },
+ {
+ "epoch": 1.5204276202682006,
+ "grad_norm": 0.8110997676849365,
+ "learning_rate": 9.77464623207006e-05,
+ "loss": 1.5193,
+ "step": 1520
+ },
+ {
+ "epoch": 1.5214279015973242,
+ "grad_norm": 0.6463339328765869,
+ "learning_rate": 9.764166561891432e-05,
+ "loss": 1.412,
+ "step": 1521
+ },
+ {
+ "epoch": 1.522428182926448,
+ "grad_norm": 0.7538262605667114,
+ "learning_rate": 9.753687150851102e-05,
+ "loss": 1.5326,
+ "step": 1522
+ },
+ {
+ "epoch": 1.5234284642555718,
+ "grad_norm": 0.7361929416656494,
+ "learning_rate": 9.74320801046405e-05,
+ "loss": 1.4104,
+ "step": 1523
+ },
+ {
+ "epoch": 1.5244287455846957,
+ "grad_norm": 0.7260544896125793,
+ "learning_rate": 9.732729152244953e-05,
+ "loss": 1.6037,
+ "step": 1524
+ },
+ {
+ "epoch": 1.5254290269138195,
+ "grad_norm": 0.6636849641799927,
+ "learning_rate": 9.722250587708181e-05,
+ "loss": 1.2908,
+ "step": 1525
+ },
+ {
+ "epoch": 1.5264293082429434,
+ "grad_norm": 0.7487931251525879,
+ "learning_rate": 9.711772328367784e-05,
+ "loss": 1.5143,
+ "step": 1526
+ },
+ {
+ "epoch": 1.527429589572067,
+ "grad_norm": 0.7224540710449219,
+ "learning_rate": 9.70129438573747e-05,
+ "loss": 1.4773,
+ "step": 1527
+ },
+ {
+ "epoch": 1.528429870901191,
+ "grad_norm": 0.6425575017929077,
+ "learning_rate": 9.690816771330608e-05,
+ "loss": 1.329,
+ "step": 1528
+ },
+ {
+ "epoch": 1.5294301522303146,
+ "grad_norm": 0.6902957558631897,
+ "learning_rate": 9.680339496660192e-05,
+ "loss": 1.3085,
+ "step": 1529
+ },
+ {
+ "epoch": 1.5304304335594385,
+ "grad_norm": 0.6722397804260254,
+ "learning_rate": 9.669862573238863e-05,
+ "loss": 1.5163,
+ "step": 1530
+ },
+ {
+ "epoch": 1.5314307148885624,
+ "grad_norm": 0.7586985230445862,
+ "learning_rate": 9.659386012578863e-05,
+ "loss": 1.3072,
+ "step": 1531
+ },
+ {
+ "epoch": 1.5324309962176863,
+ "grad_norm": 0.7313751578330994,
+ "learning_rate": 9.648909826192033e-05,
+ "loss": 1.4071,
+ "step": 1532
+ },
+ {
+ "epoch": 1.5334312775468102,
+ "grad_norm": 0.6215599775314331,
+ "learning_rate": 9.63843402558981e-05,
+ "loss": 1.4255,
+ "step": 1533
+ },
+ {
+ "epoch": 1.5344315588759339,
+ "grad_norm": 0.7188824415206909,
+ "learning_rate": 9.627958622283203e-05,
+ "loss": 1.4148,
+ "step": 1534
+ },
+ {
+ "epoch": 1.5354318402050575,
+ "grad_norm": 0.6444137692451477,
+ "learning_rate": 9.617483627782788e-05,
+ "loss": 1.6461,
+ "step": 1535
+ },
+ {
+ "epoch": 1.5364321215341814,
+ "grad_norm": 0.7150428295135498,
+ "learning_rate": 9.607009053598689e-05,
+ "loss": 1.5799,
+ "step": 1536
+ },
+ {
+ "epoch": 1.5374324028633053,
+ "grad_norm": 0.6654619574546814,
+ "learning_rate": 9.596534911240566e-05,
+ "loss": 1.3762,
+ "step": 1537
+ },
+ {
+ "epoch": 1.5384326841924292,
+ "grad_norm": 0.5966542363166809,
+ "learning_rate": 9.58606121221761e-05,
+ "loss": 1.4305,
+ "step": 1538
+ },
+ {
+ "epoch": 1.539432965521553,
+ "grad_norm": 0.6061896085739136,
+ "learning_rate": 9.57558796803852e-05,
+ "loss": 1.2492,
+ "step": 1539
+ },
+ {
+ "epoch": 1.5404332468506767,
+ "grad_norm": 0.7098972797393799,
+ "learning_rate": 9.565115190211497e-05,
+ "loss": 1.3917,
+ "step": 1540
+ },
+ {
+ "epoch": 1.5414335281798006,
+ "grad_norm": 0.7661631107330322,
+ "learning_rate": 9.554642890244233e-05,
+ "loss": 1.5048,
+ "step": 1541
+ },
+ {
+ "epoch": 1.5424338095089243,
+ "grad_norm": 2.214449405670166,
+ "learning_rate": 9.54417107964389e-05,
+ "loss": 1.3739,
+ "step": 1542
+ },
+ {
+ "epoch": 1.5434340908380482,
+ "grad_norm": 0.6061079502105713,
+ "learning_rate": 9.533699769917092e-05,
+ "loss": 1.4466,
+ "step": 1543
+ },
+ {
+ "epoch": 1.544434372167172,
+ "grad_norm": 0.76081383228302,
+ "learning_rate": 9.523228972569917e-05,
+ "loss": 1.2733,
+ "step": 1544
+ },
+ {
+ "epoch": 1.545434653496296,
+ "grad_norm": 0.6445167064666748,
+ "learning_rate": 9.512758699107879e-05,
+ "loss": 1.404,
+ "step": 1545
+ },
+ {
+ "epoch": 1.5464349348254196,
+ "grad_norm": 0.53884357213974,
+ "learning_rate": 9.502288961035912e-05,
+ "loss": 1.2348,
+ "step": 1546
+ },
+ {
+ "epoch": 1.5474352161545435,
+ "grad_norm": 0.589690625667572,
+ "learning_rate": 9.491819769858366e-05,
+ "loss": 1.2912,
+ "step": 1547
+ },
+ {
+ "epoch": 1.5484354974836672,
+ "grad_norm": 0.6259596347808838,
+ "learning_rate": 9.48135113707899e-05,
+ "loss": 1.4254,
+ "step": 1548
+ },
+ {
+ "epoch": 1.549435778812791,
+ "grad_norm": 0.5648382306098938,
+ "learning_rate": 9.470883074200916e-05,
+ "loss": 1.205,
+ "step": 1549
+ },
+ {
+ "epoch": 1.550436060141915,
+ "grad_norm": 0.6659985780715942,
+ "learning_rate": 9.460415592726653e-05,
+ "loss": 1.2726,
+ "step": 1550
+ },
+ {
+ "epoch": 1.5514363414710388,
+ "grad_norm": 0.6457205414772034,
+ "learning_rate": 9.449948704158071e-05,
+ "loss": 1.4254,
+ "step": 1551
+ },
+ {
+ "epoch": 1.5524366228001627,
+ "grad_norm": 0.753474235534668,
+ "learning_rate": 9.439482419996384e-05,
+ "loss": 1.3446,
+ "step": 1552
+ },
+ {
+ "epoch": 1.5534369041292864,
+ "grad_norm": 0.6353628039360046,
+ "learning_rate": 9.42901675174215e-05,
+ "loss": 1.2947,
+ "step": 1553
+ },
+ {
+ "epoch": 1.55443718545841,
+ "grad_norm": 0.5484879016876221,
+ "learning_rate": 9.418551710895243e-05,
+ "loss": 1.3678,
+ "step": 1554
+ },
+ {
+ "epoch": 1.555437466787534,
+ "grad_norm": 0.564643919467926,
+ "learning_rate": 9.408087308954853e-05,
+ "loss": 1.2557,
+ "step": 1555
+ },
+ {
+ "epoch": 1.5564377481166578,
+ "grad_norm": 0.6570972800254822,
+ "learning_rate": 9.397623557419461e-05,
+ "loss": 1.3769,
+ "step": 1556
+ },
+ {
+ "epoch": 1.5574380294457817,
+ "grad_norm": 0.5943097472190857,
+ "learning_rate": 9.38716046778684e-05,
+ "loss": 1.2613,
+ "step": 1557
+ },
+ {
+ "epoch": 1.5584383107749056,
+ "grad_norm": 0.6349796056747437,
+ "learning_rate": 9.37669805155403e-05,
+ "loss": 1.4075,
+ "step": 1558
+ },
+ {
+ "epoch": 1.5594385921040292,
+ "grad_norm": 0.6645040512084961,
+ "learning_rate": 9.366236320217339e-05,
+ "loss": 1.6224,
+ "step": 1559
+ },
+ {
+ "epoch": 1.5604388734331531,
+ "grad_norm": 0.7049742937088013,
+ "learning_rate": 9.355775285272318e-05,
+ "loss": 1.3948,
+ "step": 1560
+ },
+ {
+ "epoch": 1.5614391547622768,
+ "grad_norm": 0.7328057885169983,
+ "learning_rate": 9.34531495821375e-05,
+ "loss": 1.4479,
+ "step": 1561
+ },
+ {
+ "epoch": 1.5624394360914007,
+ "grad_norm": 0.6629959344863892,
+ "learning_rate": 9.334855350535645e-05,
+ "loss": 1.4399,
+ "step": 1562
+ },
+ {
+ "epoch": 1.5634397174205246,
+ "grad_norm": 0.5964148640632629,
+ "learning_rate": 9.324396473731217e-05,
+ "loss": 1.3064,
+ "step": 1563
+ },
+ {
+ "epoch": 1.5644399987496485,
+ "grad_norm": 0.6908231377601624,
+ "learning_rate": 9.313938339292883e-05,
+ "loss": 1.234,
+ "step": 1564
+ },
+ {
+ "epoch": 1.5654402800787721,
+ "grad_norm": 0.6026841402053833,
+ "learning_rate": 9.303480958712239e-05,
+ "loss": 1.2906,
+ "step": 1565
+ },
+ {
+ "epoch": 1.566440561407896,
+ "grad_norm": 0.5934796333312988,
+ "learning_rate": 9.293024343480055e-05,
+ "loss": 1.1448,
+ "step": 1566
+ },
+ {
+ "epoch": 1.5674408427370197,
+ "grad_norm": 0.6869467496871948,
+ "learning_rate": 9.282568505086261e-05,
+ "loss": 1.283,
+ "step": 1567
+ },
+ {
+ "epoch": 1.5684411240661436,
+ "grad_norm": 0.6558713912963867,
+ "learning_rate": 9.272113455019935e-05,
+ "loss": 1.502,
+ "step": 1568
+ },
+ {
+ "epoch": 1.5694414053952674,
+ "grad_norm": 0.6627963781356812,
+ "learning_rate": 9.261659204769284e-05,
+ "loss": 1.2985,
+ "step": 1569
+ },
+ {
+ "epoch": 1.5704416867243913,
+ "grad_norm": 0.7012712359428406,
+ "learning_rate": 9.251205765821636e-05,
+ "loss": 1.3899,
+ "step": 1570
+ },
+ {
+ "epoch": 1.5714419680535152,
+ "grad_norm": 0.6767538785934448,
+ "learning_rate": 9.240753149663433e-05,
+ "loss": 1.4869,
+ "step": 1571
+ },
+ {
+ "epoch": 1.5724422493826389,
+ "grad_norm": 0.6274527311325073,
+ "learning_rate": 9.230301367780208e-05,
+ "loss": 1.4491,
+ "step": 1572
+ },
+ {
+ "epoch": 1.5734425307117625,
+ "grad_norm": 0.6079627275466919,
+ "learning_rate": 9.219850431656579e-05,
+ "loss": 1.247,
+ "step": 1573
+ },
+ {
+ "epoch": 1.5744428120408864,
+ "grad_norm": 0.9947478175163269,
+ "learning_rate": 9.209400352776237e-05,
+ "loss": 1.3117,
+ "step": 1574
+ },
+ {
+ "epoch": 1.5754430933700103,
+ "grad_norm": 0.6441598534584045,
+ "learning_rate": 9.198951142621929e-05,
+ "loss": 1.2931,
+ "step": 1575
+ },
+ {
+ "epoch": 1.5764433746991342,
+ "grad_norm": 0.5995433926582336,
+ "learning_rate": 9.188502812675446e-05,
+ "loss": 1.3293,
+ "step": 1576
+ },
+ {
+ "epoch": 1.577443656028258,
+ "grad_norm": 0.5997470617294312,
+ "learning_rate": 9.178055374417612e-05,
+ "loss": 1.2542,
+ "step": 1577
+ },
+ {
+ "epoch": 1.5784439373573818,
+ "grad_norm": 0.7376891374588013,
+ "learning_rate": 9.167608839328272e-05,
+ "loss": 1.4369,
+ "step": 1578
+ },
+ {
+ "epoch": 1.5794442186865054,
+ "grad_norm": 0.7353281378746033,
+ "learning_rate": 9.15716321888628e-05,
+ "loss": 1.6255,
+ "step": 1579
+ },
+ {
+ "epoch": 1.5804445000156293,
+ "grad_norm": 0.6899515986442566,
+ "learning_rate": 9.146718524569487e-05,
+ "loss": 1.2246,
+ "step": 1580
+ },
+ {
+ "epoch": 1.5814447813447532,
+ "grad_norm": 0.6453947424888611,
+ "learning_rate": 9.136274767854716e-05,
+ "loss": 1.3967,
+ "step": 1581
+ },
+ {
+ "epoch": 1.582445062673877,
+ "grad_norm": 0.7168171405792236,
+ "learning_rate": 9.125831960217774e-05,
+ "loss": 1.3952,
+ "step": 1582
+ },
+ {
+ "epoch": 1.583445344003001,
+ "grad_norm": 0.6337130665779114,
+ "learning_rate": 9.115390113133414e-05,
+ "loss": 1.1972,
+ "step": 1583
+ },
+ {
+ "epoch": 1.5844456253321246,
+ "grad_norm": 0.7215299606323242,
+ "learning_rate": 9.104949238075336e-05,
+ "loss": 1.3696,
+ "step": 1584
+ },
+ {
+ "epoch": 1.5854459066612485,
+ "grad_norm": 0.7228485941886902,
+ "learning_rate": 9.094509346516178e-05,
+ "loss": 1.3858,
+ "step": 1585
+ },
+ {
+ "epoch": 1.5864461879903722,
+ "grad_norm": 0.6178514957427979,
+ "learning_rate": 9.084070449927488e-05,
+ "loss": 1.1581,
+ "step": 1586
+ },
+ {
+ "epoch": 1.587446469319496,
+ "grad_norm": 0.5726553201675415,
+ "learning_rate": 9.07363255977973e-05,
+ "loss": 1.265,
+ "step": 1587
+ },
+ {
+ "epoch": 1.58844675064862,
+ "grad_norm": 0.6116858124732971,
+ "learning_rate": 9.063195687542249e-05,
+ "loss": 1.3056,
+ "step": 1588
+ },
+ {
+ "epoch": 1.5894470319777438,
+ "grad_norm": 0.6042388677597046,
+ "learning_rate": 9.052759844683295e-05,
+ "loss": 1.3403,
+ "step": 1589
+ },
+ {
+ "epoch": 1.5904473133068675,
+ "grad_norm": 0.6811801791191101,
+ "learning_rate": 9.042325042669961e-05,
+ "loss": 1.4842,
+ "step": 1590
+ },
+ {
+ "epoch": 1.5914475946359914,
+ "grad_norm": 0.591401994228363,
+ "learning_rate": 9.03189129296821e-05,
+ "loss": 0.975,
+ "step": 1591
+ },
+ {
+ "epoch": 1.592447875965115,
+ "grad_norm": 0.5907956957817078,
+ "learning_rate": 9.021458607042845e-05,
+ "loss": 1.2758,
+ "step": 1592
+ },
+ {
+ "epoch": 1.593448157294239,
+ "grad_norm": 0.7272189259529114,
+ "learning_rate": 9.011026996357503e-05,
+ "loss": 1.3661,
+ "step": 1593
+ },
+ {
+ "epoch": 1.5944484386233628,
+ "grad_norm": 0.652340829372406,
+ "learning_rate": 9.000596472374637e-05,
+ "loss": 1.3547,
+ "step": 1594
+ },
+ {
+ "epoch": 1.5954487199524867,
+ "grad_norm": 0.8212108016014099,
+ "learning_rate": 8.990167046555504e-05,
+ "loss": 1.2207,
+ "step": 1595
+ },
+ {
+ "epoch": 1.5964490012816106,
+ "grad_norm": 0.6061079502105713,
+ "learning_rate": 8.97973873036016e-05,
+ "loss": 1.2555,
+ "step": 1596
+ },
+ {
+ "epoch": 1.5974492826107343,
+ "grad_norm": 0.5761566162109375,
+ "learning_rate": 8.969311535247438e-05,
+ "loss": 1.3051,
+ "step": 1597
+ },
+ {
+ "epoch": 1.598449563939858,
+ "grad_norm": 0.6714027523994446,
+ "learning_rate": 8.958885472674939e-05,
+ "loss": 1.4451,
+ "step": 1598
+ },
+ {
+ "epoch": 1.5994498452689818,
+ "grad_norm": 0.6701240539550781,
+ "learning_rate": 8.948460554099018e-05,
+ "loss": 1.4353,
+ "step": 1599
+ },
+ {
+ "epoch": 1.6004501265981057,
+ "grad_norm": 0.7223709225654602,
+ "learning_rate": 8.93803679097478e-05,
+ "loss": 1.4029,
+ "step": 1600
+ },
+ {
+ "epoch": 1.6014504079272296,
+ "grad_norm": 0.6414337158203125,
+ "learning_rate": 8.927614194756052e-05,
+ "loss": 1.304,
+ "step": 1601
+ },
+ {
+ "epoch": 1.6024506892563535,
+ "grad_norm": 0.6110413670539856,
+ "learning_rate": 8.917192776895382e-05,
+ "loss": 1.2504,
+ "step": 1602
+ },
+ {
+ "epoch": 1.6034509705854771,
+ "grad_norm": 0.6857700943946838,
+ "learning_rate": 8.906772548844026e-05,
+ "loss": 1.4735,
+ "step": 1603
+ },
+ {
+ "epoch": 1.604451251914601,
+ "grad_norm": 0.7300008535385132,
+ "learning_rate": 8.896353522051928e-05,
+ "loss": 1.753,
+ "step": 1604
+ },
+ {
+ "epoch": 1.6054515332437247,
+ "grad_norm": 0.6020368933677673,
+ "learning_rate": 8.885935707967716e-05,
+ "loss": 1.2869,
+ "step": 1605
+ },
+ {
+ "epoch": 1.6064518145728486,
+ "grad_norm": 0.5244629979133606,
+ "learning_rate": 8.875519118038684e-05,
+ "loss": 1.432,
+ "step": 1606
+ },
+ {
+ "epoch": 1.6074520959019725,
+ "grad_norm": 0.6224693059921265,
+ "learning_rate": 8.865103763710777e-05,
+ "loss": 1.6618,
+ "step": 1607
+ },
+ {
+ "epoch": 1.6084523772310964,
+ "grad_norm": 0.6111294627189636,
+ "learning_rate": 8.854689656428591e-05,
+ "loss": 1.4098,
+ "step": 1608
+ },
+ {
+ "epoch": 1.60945265856022,
+ "grad_norm": 0.6179168820381165,
+ "learning_rate": 8.844276807635343e-05,
+ "loss": 1.1749,
+ "step": 1609
+ },
+ {
+ "epoch": 1.610452939889344,
+ "grad_norm": 0.658416748046875,
+ "learning_rate": 8.833865228772871e-05,
+ "loss": 1.3686,
+ "step": 1610
+ },
+ {
+ "epoch": 1.6114532212184676,
+ "grad_norm": 0.5781399607658386,
+ "learning_rate": 8.823454931281616e-05,
+ "loss": 1.3618,
+ "step": 1611
+ },
+ {
+ "epoch": 1.6124535025475915,
+ "grad_norm": 0.6480880975723267,
+ "learning_rate": 8.813045926600615e-05,
+ "loss": 1.4162,
+ "step": 1612
+ },
+ {
+ "epoch": 1.6134537838767153,
+ "grad_norm": 0.5510106682777405,
+ "learning_rate": 8.802638226167479e-05,
+ "loss": 1.1625,
+ "step": 1613
+ },
+ {
+ "epoch": 1.6144540652058392,
+ "grad_norm": 0.6937603950500488,
+ "learning_rate": 8.792231841418391e-05,
+ "loss": 1.462,
+ "step": 1614
+ },
+ {
+ "epoch": 1.6154543465349631,
+ "grad_norm": 0.8652899265289307,
+ "learning_rate": 8.781826783788084e-05,
+ "loss": 1.389,
+ "step": 1615
+ },
+ {
+ "epoch": 1.6164546278640868,
+ "grad_norm": 0.6258351802825928,
+ "learning_rate": 8.771423064709837e-05,
+ "loss": 1.322,
+ "step": 1616
+ },
+ {
+ "epoch": 1.6174549091932104,
+ "grad_norm": 0.6774043440818787,
+ "learning_rate": 8.76102069561545e-05,
+ "loss": 1.3344,
+ "step": 1617
+ },
+ {
+ "epoch": 1.6184551905223343,
+ "grad_norm": 0.6406411528587341,
+ "learning_rate": 8.750619687935251e-05,
+ "loss": 1.3929,
+ "step": 1618
+ },
+ {
+ "epoch": 1.6194554718514582,
+ "grad_norm": 0.6380543112754822,
+ "learning_rate": 8.740220053098067e-05,
+ "loss": 1.413,
+ "step": 1619
+ },
+ {
+ "epoch": 1.620455753180582,
+ "grad_norm": 0.7143612504005432,
+ "learning_rate": 8.729821802531212e-05,
+ "loss": 1.541,
+ "step": 1620
+ },
+ {
+ "epoch": 1.621456034509706,
+ "grad_norm": 0.5897488594055176,
+ "learning_rate": 8.719424947660487e-05,
+ "loss": 1.2968,
+ "step": 1621
+ },
+ {
+ "epoch": 1.6224563158388297,
+ "grad_norm": 0.6275039911270142,
+ "learning_rate": 8.70902949991015e-05,
+ "loss": 1.4192,
+ "step": 1622
+ },
+ {
+ "epoch": 1.6234565971679535,
+ "grad_norm": 0.7218581438064575,
+ "learning_rate": 8.698635470702923e-05,
+ "loss": 1.41,
+ "step": 1623
+ },
+ {
+ "epoch": 1.6244568784970772,
+ "grad_norm": 0.7635208964347839,
+ "learning_rate": 8.688242871459963e-05,
+ "loss": 1.3006,
+ "step": 1624
+ },
+ {
+ "epoch": 1.625457159826201,
+ "grad_norm": 0.746904730796814,
+ "learning_rate": 8.677851713600855e-05,
+ "loss": 1.7362,
+ "step": 1625
+ },
+ {
+ "epoch": 1.626457441155325,
+ "grad_norm": 0.6612946391105652,
+ "learning_rate": 8.667462008543603e-05,
+ "loss": 1.6304,
+ "step": 1626
+ },
+ {
+ "epoch": 1.6274577224844489,
+ "grad_norm": 0.6576234698295593,
+ "learning_rate": 8.657073767704615e-05,
+ "loss": 1.4814,
+ "step": 1627
+ },
+ {
+ "epoch": 1.6284580038135725,
+ "grad_norm": 0.6360548138618469,
+ "learning_rate": 8.646687002498692e-05,
+ "loss": 1.4134,
+ "step": 1628
+ },
+ {
+ "epoch": 1.6294582851426964,
+ "grad_norm": 0.6985300183296204,
+ "learning_rate": 8.636301724339004e-05,
+ "loss": 1.4916,
+ "step": 1629
+ },
+ {
+ "epoch": 1.63045856647182,
+ "grad_norm": 0.5798565149307251,
+ "learning_rate": 8.625917944637096e-05,
+ "loss": 1.4304,
+ "step": 1630
+ },
+ {
+ "epoch": 1.631458847800944,
+ "grad_norm": 0.6414662599563599,
+ "learning_rate": 8.615535674802865e-05,
+ "loss": 1.4268,
+ "step": 1631
+ },
+ {
+ "epoch": 1.6324591291300679,
+ "grad_norm": 0.6145825386047363,
+ "learning_rate": 8.605154926244543e-05,
+ "loss": 1.1465,
+ "step": 1632
+ },
+ {
+ "epoch": 1.6334594104591917,
+ "grad_norm": 0.6959055662155151,
+ "learning_rate": 8.594775710368704e-05,
+ "loss": 1.4171,
+ "step": 1633
+ },
+ {
+ "epoch": 1.6344596917883156,
+ "grad_norm": 0.6742627024650574,
+ "learning_rate": 8.584398038580226e-05,
+ "loss": 1.4406,
+ "step": 1634
+ },
+ {
+ "epoch": 1.6354599731174393,
+ "grad_norm": 0.6703431606292725,
+ "learning_rate": 8.574021922282292e-05,
+ "loss": 1.423,
+ "step": 1635
+ },
+ {
+ "epoch": 1.636460254446563,
+ "grad_norm": 0.6874369382858276,
+ "learning_rate": 8.563647372876378e-05,
+ "loss": 1.3816,
+ "step": 1636
+ },
+ {
+ "epoch": 1.6374605357756868,
+ "grad_norm": 0.6975083351135254,
+ "learning_rate": 8.553274401762237e-05,
+ "loss": 1.3082,
+ "step": 1637
+ },
+ {
+ "epoch": 1.6384608171048107,
+ "grad_norm": 0.6560392379760742,
+ "learning_rate": 8.542903020337887e-05,
+ "loss": 1.4106,
+ "step": 1638
+ },
+ {
+ "epoch": 1.6394610984339346,
+ "grad_norm": 0.7531464099884033,
+ "learning_rate": 8.532533239999602e-05,
+ "loss": 1.1798,
+ "step": 1639
+ },
+ {
+ "epoch": 1.6404613797630585,
+ "grad_norm": 0.683595597743988,
+ "learning_rate": 8.522165072141897e-05,
+ "loss": 1.2502,
+ "step": 1640
+ },
+ {
+ "epoch": 1.6414616610921822,
+ "grad_norm": 0.6005716323852539,
+ "learning_rate": 8.511798528157512e-05,
+ "loss": 1.1569,
+ "step": 1641
+ },
+ {
+ "epoch": 1.6424619424213058,
+ "grad_norm": 0.5878857374191284,
+ "learning_rate": 8.501433619437403e-05,
+ "loss": 1.1993,
+ "step": 1642
+ },
+ {
+ "epoch": 1.6434622237504297,
+ "grad_norm": 0.6806159615516663,
+ "learning_rate": 8.49107035737073e-05,
+ "loss": 1.48,
+ "step": 1643
+ },
+ {
+ "epoch": 1.6444625050795536,
+ "grad_norm": 0.6241198182106018,
+ "learning_rate": 8.480708753344846e-05,
+ "loss": 1.5493,
+ "step": 1644
+ },
+ {
+ "epoch": 1.6454627864086775,
+ "grad_norm": 0.6669902205467224,
+ "learning_rate": 8.470348818745278e-05,
+ "loss": 1.3893,
+ "step": 1645
+ },
+ {
+ "epoch": 1.6464630677378014,
+ "grad_norm": 0.7545666694641113,
+ "learning_rate": 8.459990564955721e-05,
+ "loss": 1.2774,
+ "step": 1646
+ },
+ {
+ "epoch": 1.647463349066925,
+ "grad_norm": 0.7358554601669312,
+ "learning_rate": 8.449634003358022e-05,
+ "loss": 1.4932,
+ "step": 1647
+ },
+ {
+ "epoch": 1.648463630396049,
+ "grad_norm": 0.5833718180656433,
+ "learning_rate": 8.43927914533217e-05,
+ "loss": 1.447,
+ "step": 1648
+ },
+ {
+ "epoch": 1.6494639117251726,
+ "grad_norm": 0.7357178330421448,
+ "learning_rate": 8.428926002256283e-05,
+ "loss": 1.5369,
+ "step": 1649
+ },
+ {
+ "epoch": 1.6504641930542965,
+ "grad_norm": 0.6580341458320618,
+ "learning_rate": 8.418574585506591e-05,
+ "loss": 1.5551,
+ "step": 1650
+ },
+ {
+ "epoch": 1.6514644743834204,
+ "grad_norm": 0.637784481048584,
+ "learning_rate": 8.408224906457429e-05,
+ "loss": 1.3233,
+ "step": 1651
+ },
+ {
+ "epoch": 1.6524647557125443,
+ "grad_norm": 0.6412131190299988,
+ "learning_rate": 8.397876976481224e-05,
+ "loss": 1.15,
+ "step": 1652
+ },
+ {
+ "epoch": 1.653465037041668,
+ "grad_norm": 0.6500189900398254,
+ "learning_rate": 8.387530806948476e-05,
+ "loss": 1.3027,
+ "step": 1653
+ },
+ {
+ "epoch": 1.6544653183707918,
+ "grad_norm": 0.6529775857925415,
+ "learning_rate": 8.37718640922776e-05,
+ "loss": 1.4015,
+ "step": 1654
+ },
+ {
+ "epoch": 1.6554655996999155,
+ "grad_norm": 0.5686854720115662,
+ "learning_rate": 8.366843794685695e-05,
+ "loss": 1.3276,
+ "step": 1655
+ },
+ {
+ "epoch": 1.6564658810290394,
+ "grad_norm": 0.6397770643234253,
+ "learning_rate": 8.356502974686941e-05,
+ "loss": 1.2586,
+ "step": 1656
+ },
+ {
+ "epoch": 1.6574661623581632,
+ "grad_norm": 0.688079297542572,
+ "learning_rate": 8.346163960594193e-05,
+ "loss": 1.4179,
+ "step": 1657
+ },
+ {
+ "epoch": 1.6584664436872871,
+ "grad_norm": 0.8436989784240723,
+ "learning_rate": 8.335826763768156e-05,
+ "loss": 1.3527,
+ "step": 1658
+ },
+ {
+ "epoch": 1.659466725016411,
+ "grad_norm": 0.6351317763328552,
+ "learning_rate": 8.325491395567541e-05,
+ "loss": 1.2692,
+ "step": 1659
+ },
+ {
+ "epoch": 1.6604670063455347,
+ "grad_norm": 0.6486929059028625,
+ "learning_rate": 8.315157867349046e-05,
+ "loss": 1.3388,
+ "step": 1660
+ },
+ {
+ "epoch": 1.6614672876746583,
+ "grad_norm": 0.6507102847099304,
+ "learning_rate": 8.30482619046735e-05,
+ "loss": 1.3335,
+ "step": 1661
+ },
+ {
+ "epoch": 1.6624675690037822,
+ "grad_norm": 0.5884740948677063,
+ "learning_rate": 8.294496376275104e-05,
+ "loss": 1.3208,
+ "step": 1662
+ },
+ {
+ "epoch": 1.6634678503329061,
+ "grad_norm": 0.6564494371414185,
+ "learning_rate": 8.284168436122898e-05,
+ "loss": 1.164,
+ "step": 1663
+ },
+ {
+ "epoch": 1.66446813166203,
+ "grad_norm": 0.738129198551178,
+ "learning_rate": 8.273842381359273e-05,
+ "loss": 1.5263,
+ "step": 1664
+ },
+ {
+ "epoch": 1.665468412991154,
+ "grad_norm": 0.6807828545570374,
+ "learning_rate": 8.263518223330697e-05,
+ "loss": 1.3342,
+ "step": 1665
+ },
+ {
+ "epoch": 1.6664686943202776,
+ "grad_norm": 0.6691699624061584,
+ "learning_rate": 8.253195973381552e-05,
+ "loss": 1.3643,
+ "step": 1666
+ },
+ {
+ "epoch": 1.6674689756494014,
+ "grad_norm": 0.7294644117355347,
+ "learning_rate": 8.242875642854121e-05,
+ "loss": 1.3019,
+ "step": 1667
+ },
+ {
+ "epoch": 1.668469256978525,
+ "grad_norm": 0.9484224915504456,
+ "learning_rate": 8.232557243088585e-05,
+ "loss": 1.568,
+ "step": 1668
+ },
+ {
+ "epoch": 1.669469538307649,
+ "grad_norm": 0.6717697978019714,
+ "learning_rate": 8.222240785422996e-05,
+ "loss": 1.4427,
+ "step": 1669
+ },
+ {
+ "epoch": 1.6704698196367729,
+ "grad_norm": 0.6472289562225342,
+ "learning_rate": 8.211926281193277e-05,
+ "loss": 1.3165,
+ "step": 1670
+ },
+ {
+ "epoch": 1.6714701009658968,
+ "grad_norm": 0.6698246598243713,
+ "learning_rate": 8.201613741733203e-05,
+ "loss": 1.5418,
+ "step": 1671
+ },
+ {
+ "epoch": 1.6724703822950204,
+ "grad_norm": 0.6673927307128906,
+ "learning_rate": 8.191303178374389e-05,
+ "loss": 1.2539,
+ "step": 1672
+ },
+ {
+ "epoch": 1.6734706636241443,
+ "grad_norm": 0.659993588924408,
+ "learning_rate": 8.180994602446279e-05,
+ "loss": 1.4708,
+ "step": 1673
+ },
+ {
+ "epoch": 1.674470944953268,
+ "grad_norm": 0.6471976041793823,
+ "learning_rate": 8.170688025276134e-05,
+ "loss": 1.2014,
+ "step": 1674
+ },
+ {
+ "epoch": 1.6754712262823919,
+ "grad_norm": 0.6816028952598572,
+ "learning_rate": 8.160383458189022e-05,
+ "loss": 1.4187,
+ "step": 1675
+ },
+ {
+ "epoch": 1.6764715076115158,
+ "grad_norm": 0.747825562953949,
+ "learning_rate": 8.15008091250779e-05,
+ "loss": 1.4394,
+ "step": 1676
+ },
+ {
+ "epoch": 1.6774717889406396,
+ "grad_norm": 0.6439304947853088,
+ "learning_rate": 8.13978039955308e-05,
+ "loss": 1.3567,
+ "step": 1677
+ },
+ {
+ "epoch": 1.6784720702697635,
+ "grad_norm": 0.7007876634597778,
+ "learning_rate": 8.12948193064329e-05,
+ "loss": 1.3913,
+ "step": 1678
+ },
+ {
+ "epoch": 1.6794723515988872,
+ "grad_norm": 0.6919410228729248,
+ "learning_rate": 8.119185517094578e-05,
+ "loss": 1.217,
+ "step": 1679
+ },
+ {
+ "epoch": 1.6804726329280109,
+ "grad_norm": 0.59043288230896,
+ "learning_rate": 8.108891170220836e-05,
+ "loss": 1.2202,
+ "step": 1680
+ },
+ {
+ "epoch": 1.6814729142571347,
+ "grad_norm": 0.6019158363342285,
+ "learning_rate": 8.098598901333692e-05,
+ "loss": 1.2729,
+ "step": 1681
+ },
+ {
+ "epoch": 1.6824731955862586,
+ "grad_norm": 0.7911222577095032,
+ "learning_rate": 8.088308721742491e-05,
+ "loss": 1.3911,
+ "step": 1682
+ },
+ {
+ "epoch": 1.6834734769153825,
+ "grad_norm": 0.6759652495384216,
+ "learning_rate": 8.078020642754274e-05,
+ "loss": 1.3394,
+ "step": 1683
+ },
+ {
+ "epoch": 1.6844737582445064,
+ "grad_norm": 0.6729623675346375,
+ "learning_rate": 8.06773467567378e-05,
+ "loss": 1.2622,
+ "step": 1684
+ },
+ {
+ "epoch": 1.68547403957363,
+ "grad_norm": 0.7148420214653015,
+ "learning_rate": 8.057450831803428e-05,
+ "loss": 1.363,
+ "step": 1685
+ },
+ {
+ "epoch": 1.686474320902754,
+ "grad_norm": 0.6276561617851257,
+ "learning_rate": 8.047169122443302e-05,
+ "loss": 1.4551,
+ "step": 1686
+ },
+ {
+ "epoch": 1.6874746022318776,
+ "grad_norm": 0.6642428636550903,
+ "learning_rate": 8.036889558891142e-05,
+ "loss": 1.5254,
+ "step": 1687
+ },
+ {
+ "epoch": 1.6884748835610015,
+ "grad_norm": 0.6483539342880249,
+ "learning_rate": 8.026612152442329e-05,
+ "loss": 1.4619,
+ "step": 1688
+ },
+ {
+ "epoch": 1.6894751648901254,
+ "grad_norm": 0.799802303314209,
+ "learning_rate": 8.016336914389874e-05,
+ "loss": 1.5548,
+ "step": 1689
+ },
+ {
+ "epoch": 1.6904754462192493,
+ "grad_norm": 0.6891320943832397,
+ "learning_rate": 8.006063856024405e-05,
+ "loss": 1.3208,
+ "step": 1690
+ },
+ {
+ "epoch": 1.691475727548373,
+ "grad_norm": 0.6820452213287354,
+ "learning_rate": 7.995792988634152e-05,
+ "loss": 1.25,
+ "step": 1691
+ },
+ {
+ "epoch": 1.6924760088774968,
+ "grad_norm": 0.6455455422401428,
+ "learning_rate": 7.985524323504948e-05,
+ "loss": 1.4092,
+ "step": 1692
+ },
+ {
+ "epoch": 1.6934762902066205,
+ "grad_norm": 0.5509824156761169,
+ "learning_rate": 7.975257871920195e-05,
+ "loss": 1.2324,
+ "step": 1693
+ },
+ {
+ "epoch": 1.6944765715357444,
+ "grad_norm": 0.665798544883728,
+ "learning_rate": 7.964993645160866e-05,
+ "loss": 1.318,
+ "step": 1694
+ },
+ {
+ "epoch": 1.6954768528648683,
+ "grad_norm": 0.5200991034507751,
+ "learning_rate": 7.954731654505491e-05,
+ "loss": 1.1431,
+ "step": 1695
+ },
+ {
+ "epoch": 1.6964771341939922,
+ "grad_norm": 0.7394373416900635,
+ "learning_rate": 7.944471911230142e-05,
+ "loss": 1.5046,
+ "step": 1696
+ },
+ {
+ "epoch": 1.697477415523116,
+ "grad_norm": 0.7250887155532837,
+ "learning_rate": 7.93421442660842e-05,
+ "loss": 1.6707,
+ "step": 1697
+ },
+ {
+ "epoch": 1.6984776968522397,
+ "grad_norm": 0.6814633011817932,
+ "learning_rate": 7.923959211911449e-05,
+ "loss": 1.3375,
+ "step": 1698
+ },
+ {
+ "epoch": 1.6994779781813634,
+ "grad_norm": 0.8540093302726746,
+ "learning_rate": 7.91370627840785e-05,
+ "loss": 1.4843,
+ "step": 1699
+ },
+ {
+ "epoch": 1.7004782595104873,
+ "grad_norm": 0.596825122833252,
+ "learning_rate": 7.903455637363746e-05,
+ "loss": 1.2795,
+ "step": 1700
+ },
+ {
+ "epoch": 1.7014785408396111,
+ "grad_norm": 0.650389552116394,
+ "learning_rate": 7.89320730004274e-05,
+ "loss": 1.4164,
+ "step": 1701
+ },
+ {
+ "epoch": 1.702478822168735,
+ "grad_norm": 0.6001396179199219,
+ "learning_rate": 7.882961277705895e-05,
+ "loss": 1.3107,
+ "step": 1702
+ },
+ {
+ "epoch": 1.703479103497859,
+ "grad_norm": 0.6638504266738892,
+ "learning_rate": 7.872717581611741e-05,
+ "loss": 1.3454,
+ "step": 1703
+ },
+ {
+ "epoch": 1.7044793848269826,
+ "grad_norm": 0.6506242752075195,
+ "learning_rate": 7.862476223016246e-05,
+ "loss": 1.3095,
+ "step": 1704
+ },
+ {
+ "epoch": 1.7054796661561062,
+ "grad_norm": 0.6528734564781189,
+ "learning_rate": 7.852237213172812e-05,
+ "loss": 1.3354,
+ "step": 1705
+ },
+ {
+ "epoch": 1.7064799474852301,
+ "grad_norm": 0.6626534461975098,
+ "learning_rate": 7.842000563332254e-05,
+ "loss": 1.2478,
+ "step": 1706
+ },
+ {
+ "epoch": 1.707480228814354,
+ "grad_norm": 0.6849489212036133,
+ "learning_rate": 7.831766284742807e-05,
+ "loss": 1.4748,
+ "step": 1707
+ },
+ {
+ "epoch": 1.708480510143478,
+ "grad_norm": 0.6511324048042297,
+ "learning_rate": 7.82153438865009e-05,
+ "loss": 1.3912,
+ "step": 1708
+ },
+ {
+ "epoch": 1.7094807914726018,
+ "grad_norm": 0.610500156879425,
+ "learning_rate": 7.811304886297104e-05,
+ "loss": 1.5791,
+ "step": 1709
+ },
+ {
+ "epoch": 1.7104810728017255,
+ "grad_norm": 0.6765373945236206,
+ "learning_rate": 7.801077788924224e-05,
+ "loss": 1.3725,
+ "step": 1710
+ },
+ {
+ "epoch": 1.7114813541308493,
+ "grad_norm": 0.7581295371055603,
+ "learning_rate": 7.790853107769179e-05,
+ "loss": 1.4736,
+ "step": 1711
+ },
+ {
+ "epoch": 1.712481635459973,
+ "grad_norm": 0.7430140376091003,
+ "learning_rate": 7.780630854067045e-05,
+ "loss": 1.4055,
+ "step": 1712
+ },
+ {
+ "epoch": 1.713481916789097,
+ "grad_norm": 0.6995111107826233,
+ "learning_rate": 7.77041103905023e-05,
+ "loss": 1.4391,
+ "step": 1713
+ },
+ {
+ "epoch": 1.7144821981182208,
+ "grad_norm": 0.6253392100334167,
+ "learning_rate": 7.760193673948461e-05,
+ "loss": 1.3054,
+ "step": 1714
+ },
+ {
+ "epoch": 1.7154824794473447,
+ "grad_norm": 0.6570398211479187,
+ "learning_rate": 7.749978769988778e-05,
+ "loss": 1.328,
+ "step": 1715
+ },
+ {
+ "epoch": 1.7164827607764683,
+ "grad_norm": 0.6701228022575378,
+ "learning_rate": 7.739766338395511e-05,
+ "loss": 1.4463,
+ "step": 1716
+ },
+ {
+ "epoch": 1.7174830421055922,
+ "grad_norm": 0.6792857050895691,
+ "learning_rate": 7.729556390390275e-05,
+ "loss": 1.3868,
+ "step": 1717
+ },
+ {
+ "epoch": 1.7184833234347159,
+ "grad_norm": 0.6209396123886108,
+ "learning_rate": 7.719348937191957e-05,
+ "loss": 1.4349,
+ "step": 1718
+ },
+ {
+ "epoch": 1.7194836047638398,
+ "grad_norm": 0.6768544316291809,
+ "learning_rate": 7.709143990016702e-05,
+ "loss": 1.3351,
+ "step": 1719
+ },
+ {
+ "epoch": 1.7204838860929637,
+ "grad_norm": 0.6399651765823364,
+ "learning_rate": 7.698941560077899e-05,
+ "loss": 1.0941,
+ "step": 1720
+ },
+ {
+ "epoch": 1.7214841674220875,
+ "grad_norm": 0.727689802646637,
+ "learning_rate": 7.688741658586178e-05,
+ "loss": 1.6035,
+ "step": 1721
+ },
+ {
+ "epoch": 1.7224844487512114,
+ "grad_norm": 0.7155159711837769,
+ "learning_rate": 7.678544296749384e-05,
+ "loss": 1.3062,
+ "step": 1722
+ },
+ {
+ "epoch": 1.723484730080335,
+ "grad_norm": 0.7038417458534241,
+ "learning_rate": 7.668349485772572e-05,
+ "loss": 1.3564,
+ "step": 1723
+ },
+ {
+ "epoch": 1.7244850114094588,
+ "grad_norm": 0.6355061531066895,
+ "learning_rate": 7.658157236857999e-05,
+ "loss": 1.2889,
+ "step": 1724
+ },
+ {
+ "epoch": 1.7254852927385826,
+ "grad_norm": 0.6973921060562134,
+ "learning_rate": 7.6479675612051e-05,
+ "loss": 1.4768,
+ "step": 1725
+ },
+ {
+ "epoch": 1.7264855740677065,
+ "grad_norm": 0.6392905712127686,
+ "learning_rate": 7.637780470010487e-05,
+ "loss": 1.2648,
+ "step": 1726
+ },
+ {
+ "epoch": 1.7274858553968304,
+ "grad_norm": 0.6824831962585449,
+ "learning_rate": 7.62759597446793e-05,
+ "loss": 1.3659,
+ "step": 1727
+ },
+ {
+ "epoch": 1.7284861367259543,
+ "grad_norm": 0.6261188983917236,
+ "learning_rate": 7.617414085768351e-05,
+ "loss": 1.1169,
+ "step": 1728
+ },
+ {
+ "epoch": 1.729486418055078,
+ "grad_norm": 0.749819815158844,
+ "learning_rate": 7.607234815099802e-05,
+ "loss": 1.5689,
+ "step": 1729
+ },
+ {
+ "epoch": 1.7304866993842019,
+ "grad_norm": 0.6438270211219788,
+ "learning_rate": 7.597058173647458e-05,
+ "loss": 1.5213,
+ "step": 1730
+ },
+ {
+ "epoch": 1.7314869807133255,
+ "grad_norm": 0.6761091351509094,
+ "learning_rate": 7.586884172593609e-05,
+ "loss": 1.4573,
+ "step": 1731
+ },
+ {
+ "epoch": 1.7324872620424494,
+ "grad_norm": 0.7616665363311768,
+ "learning_rate": 7.576712823117645e-05,
+ "loss": 1.362,
+ "step": 1732
+ },
+ {
+ "epoch": 1.7334875433715733,
+ "grad_norm": 0.6882945895195007,
+ "learning_rate": 7.566544136396037e-05,
+ "loss": 1.3095,
+ "step": 1733
+ },
+ {
+ "epoch": 1.7344878247006972,
+ "grad_norm": 0.6496737003326416,
+ "learning_rate": 7.556378123602334e-05,
+ "loss": 1.7484,
+ "step": 1734
+ },
+ {
+ "epoch": 1.7354881060298208,
+ "grad_norm": 0.5761104822158813,
+ "learning_rate": 7.54621479590714e-05,
+ "loss": 1.1494,
+ "step": 1735
+ },
+ {
+ "epoch": 1.7364883873589447,
+ "grad_norm": 0.5761191248893738,
+ "learning_rate": 7.536054164478123e-05,
+ "loss": 1.235,
+ "step": 1736
+ },
+ {
+ "epoch": 1.7374886686880684,
+ "grad_norm": 0.6114856600761414,
+ "learning_rate": 7.525896240479976e-05,
+ "loss": 1.2359,
+ "step": 1737
+ },
+ {
+ "epoch": 1.7384889500171923,
+ "grad_norm": 0.6941234469413757,
+ "learning_rate": 7.51574103507442e-05,
+ "loss": 1.2464,
+ "step": 1738
+ },
+ {
+ "epoch": 1.7394892313463162,
+ "grad_norm": 0.8174465298652649,
+ "learning_rate": 7.505588559420189e-05,
+ "loss": 1.4083,
+ "step": 1739
+ },
+ {
+ "epoch": 1.74048951267544,
+ "grad_norm": 0.6795939207077026,
+ "learning_rate": 7.495438824673016e-05,
+ "loss": 1.1648,
+ "step": 1740
+ },
+ {
+ "epoch": 1.741489794004564,
+ "grad_norm": 0.7111831307411194,
+ "learning_rate": 7.485291841985626e-05,
+ "loss": 1.4835,
+ "step": 1741
+ },
+ {
+ "epoch": 1.7424900753336876,
+ "grad_norm": 0.6238039135932922,
+ "learning_rate": 7.475147622507717e-05,
+ "loss": 1.1049,
+ "step": 1742
+ },
+ {
+ "epoch": 1.7434903566628113,
+ "grad_norm": 0.6977236270904541,
+ "learning_rate": 7.465006177385953e-05,
+ "loss": 1.2669,
+ "step": 1743
+ },
+ {
+ "epoch": 1.7444906379919352,
+ "grad_norm": 0.5952944159507751,
+ "learning_rate": 7.454867517763948e-05,
+ "loss": 1.3034,
+ "step": 1744
+ },
+ {
+ "epoch": 1.745490919321059,
+ "grad_norm": 0.6380055546760559,
+ "learning_rate": 7.444731654782253e-05,
+ "loss": 1.348,
+ "step": 1745
+ },
+ {
+ "epoch": 1.746491200650183,
+ "grad_norm": 0.7760757207870483,
+ "learning_rate": 7.434598599578351e-05,
+ "loss": 1.4662,
+ "step": 1746
+ },
+ {
+ "epoch": 1.7474914819793068,
+ "grad_norm": 0.6411606073379517,
+ "learning_rate": 7.424468363286634e-05,
+ "loss": 1.3775,
+ "step": 1747
+ },
+ {
+ "epoch": 1.7484917633084305,
+ "grad_norm": 0.6461936235427856,
+ "learning_rate": 7.414340957038406e-05,
+ "loss": 1.53,
+ "step": 1748
+ },
+ {
+ "epoch": 1.7494920446375544,
+ "grad_norm": 0.6881558299064636,
+ "learning_rate": 7.404216391961847e-05,
+ "loss": 1.2593,
+ "step": 1749
+ },
+ {
+ "epoch": 1.750492325966678,
+ "grad_norm": 0.6212877631187439,
+ "learning_rate": 7.394094679182024e-05,
+ "loss": 1.3894,
+ "step": 1750
+ },
+ {
+ "epoch": 1.751492607295802,
+ "grad_norm": 0.6928493976593018,
+ "learning_rate": 7.383975829820874e-05,
+ "loss": 1.4044,
+ "step": 1751
+ },
+ {
+ "epoch": 1.7524928886249258,
+ "grad_norm": 0.8617000579833984,
+ "learning_rate": 7.37385985499718e-05,
+ "loss": 1.6135,
+ "step": 1752
+ },
+ {
+ "epoch": 1.7534931699540497,
+ "grad_norm": 0.5414397716522217,
+ "learning_rate": 7.36374676582657e-05,
+ "loss": 1.2033,
+ "step": 1753
+ },
+ {
+ "epoch": 1.7544934512831734,
+ "grad_norm": 0.6777795553207397,
+ "learning_rate": 7.353636573421496e-05,
+ "loss": 1.2382,
+ "step": 1754
+ },
+ {
+ "epoch": 1.7554937326122972,
+ "grad_norm": 0.6595777273178101,
+ "learning_rate": 7.343529288891239e-05,
+ "loss": 1.3224,
+ "step": 1755
+ },
+ {
+ "epoch": 1.756494013941421,
+ "grad_norm": 0.6468746066093445,
+ "learning_rate": 7.333424923341868e-05,
+ "loss": 1.3266,
+ "step": 1756
+ },
+ {
+ "epoch": 1.7574942952705448,
+ "grad_norm": 0.6333887577056885,
+ "learning_rate": 7.323323487876257e-05,
+ "loss": 1.2204,
+ "step": 1757
+ },
+ {
+ "epoch": 1.7584945765996687,
+ "grad_norm": 0.6916175484657288,
+ "learning_rate": 7.313224993594057e-05,
+ "loss": 1.2936,
+ "step": 1758
+ },
+ {
+ "epoch": 1.7594948579287926,
+ "grad_norm": 0.6844228506088257,
+ "learning_rate": 7.303129451591686e-05,
+ "loss": 1.3675,
+ "step": 1759
+ },
+ {
+ "epoch": 1.7604951392579165,
+ "grad_norm": 0.774632453918457,
+ "learning_rate": 7.29303687296232e-05,
+ "loss": 1.4389,
+ "step": 1760
+ },
+ {
+ "epoch": 1.7614954205870401,
+ "grad_norm": 0.648438572883606,
+ "learning_rate": 7.282947268795877e-05,
+ "loss": 1.3824,
+ "step": 1761
+ },
+ {
+ "epoch": 1.7624957019161638,
+ "grad_norm": 0.6045622229576111,
+ "learning_rate": 7.272860650179006e-05,
+ "loss": 1.5047,
+ "step": 1762
+ },
+ {
+ "epoch": 1.7634959832452877,
+ "grad_norm": 0.5664568543434143,
+ "learning_rate": 7.262777028195081e-05,
+ "loss": 1.2122,
+ "step": 1763
+ },
+ {
+ "epoch": 1.7644962645744116,
+ "grad_norm": 0.6108324527740479,
+ "learning_rate": 7.252696413924174e-05,
+ "loss": 1.1857,
+ "step": 1764
+ },
+ {
+ "epoch": 1.7654965459035354,
+ "grad_norm": 0.6443206667900085,
+ "learning_rate": 7.242618818443056e-05,
+ "loss": 1.3842,
+ "step": 1765
+ },
+ {
+ "epoch": 1.7664968272326593,
+ "grad_norm": 0.7583184242248535,
+ "learning_rate": 7.232544252825189e-05,
+ "loss": 1.2639,
+ "step": 1766
+ },
+ {
+ "epoch": 1.767497108561783,
+ "grad_norm": 1.4318912029266357,
+ "learning_rate": 7.222472728140695e-05,
+ "loss": 1.0818,
+ "step": 1767
+ },
+ {
+ "epoch": 1.7684973898909067,
+ "grad_norm": 0.7584578394889832,
+ "learning_rate": 7.212404255456357e-05,
+ "loss": 1.3726,
+ "step": 1768
+ },
+ {
+ "epoch": 1.7694976712200305,
+ "grad_norm": 0.6667199730873108,
+ "learning_rate": 7.202338845835606e-05,
+ "loss": 1.4032,
+ "step": 1769
+ },
+ {
+ "epoch": 1.7704979525491544,
+ "grad_norm": 0.6628596782684326,
+ "learning_rate": 7.192276510338507e-05,
+ "loss": 1.3554,
+ "step": 1770
+ },
+ {
+ "epoch": 1.7714982338782783,
+ "grad_norm": 0.6599582433700562,
+ "learning_rate": 7.182217260021749e-05,
+ "loss": 1.4631,
+ "step": 1771
+ },
+ {
+ "epoch": 1.7724985152074022,
+ "grad_norm": 0.6616823077201843,
+ "learning_rate": 7.172161105938624e-05,
+ "loss": 1.2651,
+ "step": 1772
+ },
+ {
+ "epoch": 1.7734987965365259,
+ "grad_norm": 0.7346659898757935,
+ "learning_rate": 7.162108059139032e-05,
+ "loss": 1.2884,
+ "step": 1773
+ },
+ {
+ "epoch": 1.7744990778656498,
+ "grad_norm": 0.7243573069572449,
+ "learning_rate": 7.15205813066945e-05,
+ "loss": 1.5171,
+ "step": 1774
+ },
+ {
+ "epoch": 1.7754993591947734,
+ "grad_norm": 0.7012961506843567,
+ "learning_rate": 7.142011331572936e-05,
+ "loss": 1.3929,
+ "step": 1775
+ },
+ {
+ "epoch": 1.7764996405238973,
+ "grad_norm": 0.7035613059997559,
+ "learning_rate": 7.131967672889101e-05,
+ "loss": 1.3625,
+ "step": 1776
+ },
+ {
+ "epoch": 1.7774999218530212,
+ "grad_norm": 0.6865414977073669,
+ "learning_rate": 7.121927165654109e-05,
+ "loss": 1.34,
+ "step": 1777
+ },
+ {
+ "epoch": 1.778500203182145,
+ "grad_norm": 0.6221490502357483,
+ "learning_rate": 7.111889820900664e-05,
+ "loss": 1.4009,
+ "step": 1778
+ },
+ {
+ "epoch": 1.7795004845112687,
+ "grad_norm": 0.7319238185882568,
+ "learning_rate": 7.101855649657991e-05,
+ "loss": 1.4471,
+ "step": 1779
+ },
+ {
+ "epoch": 1.7805007658403926,
+ "grad_norm": 0.6840189695358276,
+ "learning_rate": 7.091824662951827e-05,
+ "loss": 1.256,
+ "step": 1780
+ },
+ {
+ "epoch": 1.7815010471695163,
+ "grad_norm": 0.6095514297485352,
+ "learning_rate": 7.08179687180442e-05,
+ "loss": 1.2354,
+ "step": 1781
+ },
+ {
+ "epoch": 1.7825013284986402,
+ "grad_norm": 0.6160232424736023,
+ "learning_rate": 7.071772287234497e-05,
+ "loss": 1.2264,
+ "step": 1782
+ },
+ {
+ "epoch": 1.783501609827764,
+ "grad_norm": 0.8078712224960327,
+ "learning_rate": 7.06175092025726e-05,
+ "loss": 1.5183,
+ "step": 1783
+ },
+ {
+ "epoch": 1.784501891156888,
+ "grad_norm": 0.7782847881317139,
+ "learning_rate": 7.051732781884378e-05,
+ "loss": 1.3455,
+ "step": 1784
+ },
+ {
+ "epoch": 1.7855021724860118,
+ "grad_norm": 0.6435388326644897,
+ "learning_rate": 7.041717883123977e-05,
+ "loss": 1.3738,
+ "step": 1785
+ },
+ {
+ "epoch": 1.7865024538151355,
+ "grad_norm": 0.6329066157341003,
+ "learning_rate": 7.031706234980617e-05,
+ "loss": 1.3574,
+ "step": 1786
+ },
+ {
+ "epoch": 1.7875027351442592,
+ "grad_norm": 0.7085026502609253,
+ "learning_rate": 7.021697848455291e-05,
+ "loss": 1.4758,
+ "step": 1787
+ },
+ {
+ "epoch": 1.788503016473383,
+ "grad_norm": 0.6369369029998779,
+ "learning_rate": 7.011692734545403e-05,
+ "loss": 1.4166,
+ "step": 1788
+ },
+ {
+ "epoch": 1.789503297802507,
+ "grad_norm": 0.5777859687805176,
+ "learning_rate": 7.001690904244767e-05,
+ "loss": 1.2348,
+ "step": 1789
+ },
+ {
+ "epoch": 1.7905035791316308,
+ "grad_norm": 0.6327416896820068,
+ "learning_rate": 6.991692368543584e-05,
+ "loss": 1.3229,
+ "step": 1790
+ },
+ {
+ "epoch": 1.7915038604607547,
+ "grad_norm": 0.6739888191223145,
+ "learning_rate": 6.981697138428434e-05,
+ "loss": 1.3607,
+ "step": 1791
+ },
+ {
+ "epoch": 1.7925041417898784,
+ "grad_norm": 0.6856040954589844,
+ "learning_rate": 6.971705224882271e-05,
+ "loss": 1.3359,
+ "step": 1792
+ },
+ {
+ "epoch": 1.7935044231190023,
+ "grad_norm": 0.6209323406219482,
+ "learning_rate": 6.9617166388844e-05,
+ "loss": 1.1944,
+ "step": 1793
+ },
+ {
+ "epoch": 1.794504704448126,
+ "grad_norm": 0.6018400192260742,
+ "learning_rate": 6.951731391410468e-05,
+ "loss": 1.2403,
+ "step": 1794
+ },
+ {
+ "epoch": 1.7955049857772498,
+ "grad_norm": 0.7714535593986511,
+ "learning_rate": 6.94174949343246e-05,
+ "loss": 1.6654,
+ "step": 1795
+ },
+ {
+ "epoch": 1.7965052671063737,
+ "grad_norm": 0.6726022362709045,
+ "learning_rate": 6.931770955918674e-05,
+ "loss": 1.5234,
+ "step": 1796
+ },
+ {
+ "epoch": 1.7975055484354976,
+ "grad_norm": 0.6442826390266418,
+ "learning_rate": 6.921795789833723e-05,
+ "loss": 1.4253,
+ "step": 1797
+ },
+ {
+ "epoch": 1.7985058297646213,
+ "grad_norm": 0.6518099904060364,
+ "learning_rate": 6.911824006138503e-05,
+ "loss": 1.2595,
+ "step": 1798
+ },
+ {
+ "epoch": 1.7995061110937451,
+ "grad_norm": 0.6912478804588318,
+ "learning_rate": 6.901855615790206e-05,
+ "loss": 1.2241,
+ "step": 1799
+ },
+ {
+ "epoch": 1.8005063924228688,
+ "grad_norm": 0.7709338068962097,
+ "learning_rate": 6.891890629742288e-05,
+ "loss": 1.492,
+ "step": 1800
+ },
+ {
+ "epoch": 1.8015066737519927,
+ "grad_norm": 0.7332537770271301,
+ "learning_rate": 6.88192905894447e-05,
+ "loss": 1.2449,
+ "step": 1801
+ },
+ {
+ "epoch": 1.8025069550811166,
+ "grad_norm": 0.6881892681121826,
+ "learning_rate": 6.871970914342712e-05,
+ "loss": 1.5265,
+ "step": 1802
+ },
+ {
+ "epoch": 1.8035072364102405,
+ "grad_norm": 0.6630619764328003,
+ "learning_rate": 6.862016206879216e-05,
+ "loss": 1.4283,
+ "step": 1803
+ },
+ {
+ "epoch": 1.8045075177393644,
+ "grad_norm": 0.6601552367210388,
+ "learning_rate": 6.852064947492405e-05,
+ "loss": 1.296,
+ "step": 1804
+ },
+ {
+ "epoch": 1.805507799068488,
+ "grad_norm": 0.7896683216094971,
+ "learning_rate": 6.842117147116913e-05,
+ "loss": 1.3608,
+ "step": 1805
+ },
+ {
+ "epoch": 1.8065080803976117,
+ "grad_norm": 0.6206938624382019,
+ "learning_rate": 6.832172816683575e-05,
+ "loss": 1.2986,
+ "step": 1806
+ },
+ {
+ "epoch": 1.8075083617267356,
+ "grad_norm": 0.6818989515304565,
+ "learning_rate": 6.82223196711941e-05,
+ "loss": 1.3181,
+ "step": 1807
+ },
+ {
+ "epoch": 1.8085086430558595,
+ "grad_norm": 0.716175377368927,
+ "learning_rate": 6.812294609347615e-05,
+ "loss": 1.3468,
+ "step": 1808
+ },
+ {
+ "epoch": 1.8095089243849833,
+ "grad_norm": 0.6611238718032837,
+ "learning_rate": 6.802360754287547e-05,
+ "loss": 1.3609,
+ "step": 1809
+ },
+ {
+ "epoch": 1.8105092057141072,
+ "grad_norm": 0.637792706489563,
+ "learning_rate": 6.79243041285472e-05,
+ "loss": 1.3175,
+ "step": 1810
+ },
+ {
+ "epoch": 1.811509487043231,
+ "grad_norm": 0.6208909749984741,
+ "learning_rate": 6.782503595960782e-05,
+ "loss": 1.5203,
+ "step": 1811
+ },
+ {
+ "epoch": 1.8125097683723548,
+ "grad_norm": 0.8295003175735474,
+ "learning_rate": 6.772580314513508e-05,
+ "loss": 1.4409,
+ "step": 1812
+ },
+ {
+ "epoch": 1.8135100497014784,
+ "grad_norm": 0.6259089708328247,
+ "learning_rate": 6.762660579416791e-05,
+ "loss": 1.5972,
+ "step": 1813
+ },
+ {
+ "epoch": 1.8145103310306023,
+ "grad_norm": 0.6317711472511292,
+ "learning_rate": 6.752744401570625e-05,
+ "loss": 1.4977,
+ "step": 1814
+ },
+ {
+ "epoch": 1.8155106123597262,
+ "grad_norm": 0.6420071721076965,
+ "learning_rate": 6.742831791871096e-05,
+ "loss": 1.2393,
+ "step": 1815
+ },
+ {
+ "epoch": 1.81651089368885,
+ "grad_norm": 1.0690205097198486,
+ "learning_rate": 6.732922761210369e-05,
+ "loss": 1.1627,
+ "step": 1816
+ },
+ {
+ "epoch": 1.8175111750179738,
+ "grad_norm": 0.817659854888916,
+ "learning_rate": 6.723017320476679e-05,
+ "loss": 1.2888,
+ "step": 1817
+ },
+ {
+ "epoch": 1.8185114563470977,
+ "grad_norm": 0.5951409339904785,
+ "learning_rate": 6.713115480554313e-05,
+ "loss": 1.4495,
+ "step": 1818
+ },
+ {
+ "epoch": 1.8195117376762213,
+ "grad_norm": 0.6608357429504395,
+ "learning_rate": 6.7032172523236e-05,
+ "loss": 1.5617,
+ "step": 1819
+ },
+ {
+ "epoch": 1.8205120190053452,
+ "grad_norm": 0.5993407368659973,
+ "learning_rate": 6.693322646660906e-05,
+ "loss": 1.4812,
+ "step": 1820
+ },
+ {
+ "epoch": 1.821512300334469,
+ "grad_norm": 0.6179289817810059,
+ "learning_rate": 6.683431674438612e-05,
+ "loss": 1.2661,
+ "step": 1821
+ },
+ {
+ "epoch": 1.822512581663593,
+ "grad_norm": 0.6814618110656738,
+ "learning_rate": 6.673544346525107e-05,
+ "loss": 1.5599,
+ "step": 1822
+ },
+ {
+ "epoch": 1.8235128629927169,
+ "grad_norm": 0.7015717029571533,
+ "learning_rate": 6.663660673784777e-05,
+ "loss": 1.5481,
+ "step": 1823
+ },
+ {
+ "epoch": 1.8245131443218405,
+ "grad_norm": 0.6974764466285706,
+ "learning_rate": 6.653780667077985e-05,
+ "loss": 1.3893,
+ "step": 1824
+ },
+ {
+ "epoch": 1.8255134256509642,
+ "grad_norm": 0.614414393901825,
+ "learning_rate": 6.643904337261082e-05,
+ "loss": 1.4293,
+ "step": 1825
+ },
+ {
+ "epoch": 1.826513706980088,
+ "grad_norm": 0.6676987409591675,
+ "learning_rate": 6.634031695186362e-05,
+ "loss": 1.2622,
+ "step": 1826
+ },
+ {
+ "epoch": 1.827513988309212,
+ "grad_norm": 0.689335823059082,
+ "learning_rate": 6.624162751702076e-05,
+ "loss": 1.2908,
+ "step": 1827
+ },
+ {
+ "epoch": 1.8285142696383359,
+ "grad_norm": 0.6336010098457336,
+ "learning_rate": 6.614297517652409e-05,
+ "loss": 1.434,
+ "step": 1828
+ },
+ {
+ "epoch": 1.8295145509674597,
+ "grad_norm": 0.549472987651825,
+ "learning_rate": 6.604436003877464e-05,
+ "loss": 1.2401,
+ "step": 1829
+ },
+ {
+ "epoch": 1.8305148322965834,
+ "grad_norm": 0.6261748671531677,
+ "learning_rate": 6.594578221213265e-05,
+ "loss": 1.4202,
+ "step": 1830
+ },
+ {
+ "epoch": 1.831515113625707,
+ "grad_norm": 0.6431935429573059,
+ "learning_rate": 6.58472418049173e-05,
+ "loss": 1.4625,
+ "step": 1831
+ },
+ {
+ "epoch": 1.832515394954831,
+ "grad_norm": 0.726426362991333,
+ "learning_rate": 6.574873892540671e-05,
+ "loss": 1.4571,
+ "step": 1832
+ },
+ {
+ "epoch": 1.8335156762839548,
+ "grad_norm": 0.6399835348129272,
+ "learning_rate": 6.565027368183769e-05,
+ "loss": 1.5148,
+ "step": 1833
+ },
+ {
+ "epoch": 1.8345159576130787,
+ "grad_norm": 0.6948434114456177,
+ "learning_rate": 6.555184618240577e-05,
+ "loss": 1.3199,
+ "step": 1834
+ },
+ {
+ "epoch": 1.8355162389422026,
+ "grad_norm": 0.8539021611213684,
+ "learning_rate": 6.545345653526495e-05,
+ "loss": 1.4847,
+ "step": 1835
+ },
+ {
+ "epoch": 1.8365165202713263,
+ "grad_norm": 0.6419363617897034,
+ "learning_rate": 6.535510484852767e-05,
+ "loss": 1.4148,
+ "step": 1836
+ },
+ {
+ "epoch": 1.8375168016004502,
+ "grad_norm": 0.6716374158859253,
+ "learning_rate": 6.525679123026463e-05,
+ "loss": 1.3921,
+ "step": 1837
+ },
+ {
+ "epoch": 1.8385170829295738,
+ "grad_norm": 0.6869264841079712,
+ "learning_rate": 6.515851578850474e-05,
+ "loss": 1.5427,
+ "step": 1838
+ },
+ {
+ "epoch": 1.8395173642586977,
+ "grad_norm": 0.6462099552154541,
+ "learning_rate": 6.506027863123492e-05,
+ "loss": 1.44,
+ "step": 1839
+ },
+ {
+ "epoch": 1.8405176455878216,
+ "grad_norm": 0.7004585266113281,
+ "learning_rate": 6.496207986640004e-05,
+ "loss": 1.352,
+ "step": 1840
+ },
+ {
+ "epoch": 1.8415179269169455,
+ "grad_norm": 0.6404154896736145,
+ "learning_rate": 6.48639196019028e-05,
+ "loss": 1.309,
+ "step": 1841
+ },
+ {
+ "epoch": 1.8425182082460692,
+ "grad_norm": 0.6515551209449768,
+ "learning_rate": 6.476579794560356e-05,
+ "loss": 1.1967,
+ "step": 1842
+ },
+ {
+ "epoch": 1.843518489575193,
+ "grad_norm": 0.6698777675628662,
+ "learning_rate": 6.46677150053203e-05,
+ "loss": 1.4054,
+ "step": 1843
+ },
+ {
+ "epoch": 1.8445187709043167,
+ "grad_norm": 0.5269995927810669,
+ "learning_rate": 6.45696708888284e-05,
+ "loss": 1.3496,
+ "step": 1844
+ },
+ {
+ "epoch": 1.8455190522334406,
+ "grad_norm": 0.7120770215988159,
+ "learning_rate": 6.447166570386063e-05,
+ "loss": 1.449,
+ "step": 1845
+ },
+ {
+ "epoch": 1.8465193335625645,
+ "grad_norm": 0.6120896935462952,
+ "learning_rate": 6.437369955810699e-05,
+ "loss": 1.3471,
+ "step": 1846
+ },
+ {
+ "epoch": 1.8475196148916884,
+ "grad_norm": 0.7216696739196777,
+ "learning_rate": 6.42757725592145e-05,
+ "loss": 1.681,
+ "step": 1847
+ },
+ {
+ "epoch": 1.8485198962208123,
+ "grad_norm": 0.7460939288139343,
+ "learning_rate": 6.417788481478728e-05,
+ "loss": 1.3102,
+ "step": 1848
+ },
+ {
+ "epoch": 1.849520177549936,
+ "grad_norm": 0.6691422462463379,
+ "learning_rate": 6.40800364323862e-05,
+ "loss": 1.2691,
+ "step": 1849
+ },
+ {
+ "epoch": 1.8505204588790596,
+ "grad_norm": 0.5813978314399719,
+ "learning_rate": 6.398222751952899e-05,
+ "loss": 1.3182,
+ "step": 1850
+ },
+ {
+ "epoch": 1.8515207402081835,
+ "grad_norm": 0.7524621486663818,
+ "learning_rate": 6.388445818368991e-05,
+ "loss": 1.2682,
+ "step": 1851
+ },
+ {
+ "epoch": 1.8525210215373074,
+ "grad_norm": 0.6798551678657532,
+ "learning_rate": 6.378672853229981e-05,
+ "loss": 1.3795,
+ "step": 1852
+ },
+ {
+ "epoch": 1.8535213028664312,
+ "grad_norm": 0.6338953971862793,
+ "learning_rate": 6.368903867274585e-05,
+ "loss": 1.3801,
+ "step": 1853
+ },
+ {
+ "epoch": 1.8545215841955551,
+ "grad_norm": 0.6461024880409241,
+ "learning_rate": 6.35913887123716e-05,
+ "loss": 1.3359,
+ "step": 1854
+ },
+ {
+ "epoch": 1.8555218655246788,
+ "grad_norm": 0.7232131958007812,
+ "learning_rate": 6.34937787584767e-05,
+ "loss": 1.4629,
+ "step": 1855
+ },
+ {
+ "epoch": 1.8565221468538027,
+ "grad_norm": 0.6489596366882324,
+ "learning_rate": 6.339620891831678e-05,
+ "loss": 1.2078,
+ "step": 1856
+ },
+ {
+ "epoch": 1.8575224281829263,
+ "grad_norm": 0.5821114778518677,
+ "learning_rate": 6.329867929910347e-05,
+ "loss": 1.5002,
+ "step": 1857
+ },
+ {
+ "epoch": 1.8585227095120502,
+ "grad_norm": 0.5860056281089783,
+ "learning_rate": 6.32011900080042e-05,
+ "loss": 1.1737,
+ "step": 1858
+ },
+ {
+ "epoch": 1.8595229908411741,
+ "grad_norm": 0.5989000797271729,
+ "learning_rate": 6.310374115214204e-05,
+ "loss": 1.3945,
+ "step": 1859
+ },
+ {
+ "epoch": 1.860523272170298,
+ "grad_norm": 0.7010142803192139,
+ "learning_rate": 6.30063328385957e-05,
+ "loss": 1.4431,
+ "step": 1860
+ },
+ {
+ "epoch": 1.8615235534994217,
+ "grad_norm": 0.5994375348091125,
+ "learning_rate": 6.290896517439925e-05,
+ "loss": 1.5378,
+ "step": 1861
+ },
+ {
+ "epoch": 1.8625238348285456,
+ "grad_norm": 0.7813047170639038,
+ "learning_rate": 6.281163826654218e-05,
+ "loss": 1.4424,
+ "step": 1862
+ },
+ {
+ "epoch": 1.8635241161576692,
+ "grad_norm": 0.8812029361724854,
+ "learning_rate": 6.271435222196916e-05,
+ "loss": 1.4378,
+ "step": 1863
+ },
+ {
+ "epoch": 1.864524397486793,
+ "grad_norm": 0.7037883400917053,
+ "learning_rate": 6.261710714757994e-05,
+ "loss": 1.5074,
+ "step": 1864
+ },
+ {
+ "epoch": 1.865524678815917,
+ "grad_norm": 0.6375555396080017,
+ "learning_rate": 6.251990315022927e-05,
+ "loss": 1.5777,
+ "step": 1865
+ },
+ {
+ "epoch": 1.8665249601450409,
+ "grad_norm": 0.7071056365966797,
+ "learning_rate": 6.24227403367268e-05,
+ "loss": 1.5755,
+ "step": 1866
+ },
+ {
+ "epoch": 1.8675252414741648,
+ "grad_norm": 0.69529789686203,
+ "learning_rate": 6.232561881383687e-05,
+ "loss": 1.5843,
+ "step": 1867
+ },
+ {
+ "epoch": 1.8685255228032884,
+ "grad_norm": 0.7598209977149963,
+ "learning_rate": 6.222853868827839e-05,
+ "loss": 1.3657,
+ "step": 1868
+ },
+ {
+ "epoch": 1.869525804132412,
+ "grad_norm": 0.6517311930656433,
+ "learning_rate": 6.213150006672499e-05,
+ "loss": 1.5294,
+ "step": 1869
+ },
+ {
+ "epoch": 1.870526085461536,
+ "grad_norm": 0.5940656065940857,
+ "learning_rate": 6.20345030558045e-05,
+ "loss": 1.4061,
+ "step": 1870
+ },
+ {
+ "epoch": 1.8715263667906599,
+ "grad_norm": 0.6668642163276672,
+ "learning_rate": 6.193754776209911e-05,
+ "loss": 1.339,
+ "step": 1871
+ },
+ {
+ "epoch": 1.8725266481197838,
+ "grad_norm": 0.6001901626586914,
+ "learning_rate": 6.184063429214515e-05,
+ "loss": 1.2836,
+ "step": 1872
+ },
+ {
+ "epoch": 1.8735269294489076,
+ "grad_norm": 0.703733503818512,
+ "learning_rate": 6.174376275243299e-05,
+ "loss": 1.3441,
+ "step": 1873
+ },
+ {
+ "epoch": 1.8745272107780313,
+ "grad_norm": 0.5734414458274841,
+ "learning_rate": 6.164693324940694e-05,
+ "loss": 1.09,
+ "step": 1874
+ },
+ {
+ "epoch": 1.8755274921071552,
+ "grad_norm": 0.6441298127174377,
+ "learning_rate": 6.15501458894651e-05,
+ "loss": 1.2849,
+ "step": 1875
+ },
+ {
+ "epoch": 1.8765277734362789,
+ "grad_norm": 0.6523350477218628,
+ "learning_rate": 6.145340077895929e-05,
+ "loss": 1.4681,
+ "step": 1876
+ },
+ {
+ "epoch": 1.8775280547654027,
+ "grad_norm": 0.6061530113220215,
+ "learning_rate": 6.135669802419488e-05,
+ "loss": 1.3961,
+ "step": 1877
+ },
+ {
+ "epoch": 1.8785283360945266,
+ "grad_norm": 0.7068478465080261,
+ "learning_rate": 6.126003773143072e-05,
+ "loss": 1.2469,
+ "step": 1878
+ },
+ {
+ "epoch": 1.8795286174236505,
+ "grad_norm": 0.6139722466468811,
+ "learning_rate": 6.116342000687896e-05,
+ "loss": 1.4774,
+ "step": 1879
+ },
+ {
+ "epoch": 1.8805288987527742,
+ "grad_norm": 0.7666826248168945,
+ "learning_rate": 6.106684495670506e-05,
+ "loss": 1.4634,
+ "step": 1880
+ },
+ {
+ "epoch": 1.881529180081898,
+ "grad_norm": 0.5872985124588013,
+ "learning_rate": 6.097031268702746e-05,
+ "loss": 1.2302,
+ "step": 1881
+ },
+ {
+ "epoch": 1.8825294614110217,
+ "grad_norm": 0.6170175075531006,
+ "learning_rate": 6.087382330391774e-05,
+ "loss": 1.0561,
+ "step": 1882
+ },
+ {
+ "epoch": 1.8835297427401456,
+ "grad_norm": 0.7397921085357666,
+ "learning_rate": 6.077737691340023e-05,
+ "loss": 1.3574,
+ "step": 1883
+ },
+ {
+ "epoch": 1.8845300240692695,
+ "grad_norm": 0.5703612565994263,
+ "learning_rate": 6.0680973621452105e-05,
+ "loss": 1.2651,
+ "step": 1884
+ },
+ {
+ "epoch": 1.8855303053983934,
+ "grad_norm": 0.6688309907913208,
+ "learning_rate": 6.0584613534003144e-05,
+ "loss": 1.5202,
+ "step": 1885
+ },
+ {
+ "epoch": 1.8865305867275173,
+ "grad_norm": 0.6952941417694092,
+ "learning_rate": 6.0488296756935636e-05,
+ "loss": 1.3407,
+ "step": 1886
+ },
+ {
+ "epoch": 1.887530868056641,
+ "grad_norm": 0.6440621018409729,
+ "learning_rate": 6.039202339608432e-05,
+ "loss": 1.3836,
+ "step": 1887
+ },
+ {
+ "epoch": 1.8885311493857646,
+ "grad_norm": 0.7453868985176086,
+ "learning_rate": 6.0295793557236203e-05,
+ "loss": 1.3744,
+ "step": 1888
+ },
+ {
+ "epoch": 1.8895314307148885,
+ "grad_norm": 0.5936272740364075,
+ "learning_rate": 6.019960734613047e-05,
+ "loss": 1.2957,
+ "step": 1889
+ },
+ {
+ "epoch": 1.8905317120440124,
+ "grad_norm": 0.6652967929840088,
+ "learning_rate": 6.010346486845837e-05,
+ "loss": 1.3191,
+ "step": 1890
+ },
+ {
+ "epoch": 1.8915319933731363,
+ "grad_norm": 0.5736771821975708,
+ "learning_rate": 6.0007366229863117e-05,
+ "loss": 1.2785,
+ "step": 1891
+ },
+ {
+ "epoch": 1.8925322747022602,
+ "grad_norm": 0.6693833470344543,
+ "learning_rate": 5.991131153593971e-05,
+ "loss": 1.2598,
+ "step": 1892
+ },
+ {
+ "epoch": 1.8935325560313838,
+ "grad_norm": 0.6824096441268921,
+ "learning_rate": 5.981530089223489e-05,
+ "loss": 1.6205,
+ "step": 1893
+ },
+ {
+ "epoch": 1.8945328373605075,
+ "grad_norm": 0.6346132159233093,
+ "learning_rate": 5.971933440424703e-05,
+ "loss": 1.2442,
+ "step": 1894
+ },
+ {
+ "epoch": 1.8955331186896314,
+ "grad_norm": 0.6125045418739319,
+ "learning_rate": 5.9623412177425886e-05,
+ "loss": 1.3326,
+ "step": 1895
+ },
+ {
+ "epoch": 1.8965334000187553,
+ "grad_norm": 0.5784642696380615,
+ "learning_rate": 5.952753431717268e-05,
+ "loss": 1.2986,
+ "step": 1896
+ },
+ {
+ "epoch": 1.8975336813478791,
+ "grad_norm": 0.6546468138694763,
+ "learning_rate": 5.9431700928839805e-05,
+ "loss": 1.2985,
+ "step": 1897
+ },
+ {
+ "epoch": 1.898533962677003,
+ "grad_norm": 0.6771805286407471,
+ "learning_rate": 5.933591211773082e-05,
+ "loss": 1.3101,
+ "step": 1898
+ },
+ {
+ "epoch": 1.8995342440061267,
+ "grad_norm": 0.672447681427002,
+ "learning_rate": 5.924016798910037e-05,
+ "loss": 1.3677,
+ "step": 1899
+ },
+ {
+ "epoch": 1.9005345253352506,
+ "grad_norm": 0.6948104500770569,
+ "learning_rate": 5.914446864815388e-05,
+ "loss": 1.2893,
+ "step": 1900
+ },
+ {
+ "epoch": 1.9015348066643742,
+ "grad_norm": 0.6217272877693176,
+ "learning_rate": 5.9048814200047675e-05,
+ "loss": 1.3604,
+ "step": 1901
+ },
+ {
+ "epoch": 1.9025350879934981,
+ "grad_norm": 0.7615220546722412,
+ "learning_rate": 5.895320474988864e-05,
+ "loss": 1.4518,
+ "step": 1902
+ },
+ {
+ "epoch": 1.903535369322622,
+ "grad_norm": 0.6633756160736084,
+ "learning_rate": 5.885764040273426e-05,
+ "loss": 1.4675,
+ "step": 1903
+ },
+ {
+ "epoch": 1.904535650651746,
+ "grad_norm": 0.877419114112854,
+ "learning_rate": 5.876212126359251e-05,
+ "loss": 1.2659,
+ "step": 1904
+ },
+ {
+ "epoch": 1.9055359319808696,
+ "grad_norm": 0.6730911731719971,
+ "learning_rate": 5.866664743742162e-05,
+ "loss": 1.4772,
+ "step": 1905
+ },
+ {
+ "epoch": 1.9065362133099935,
+ "grad_norm": 0.6866170763969421,
+ "learning_rate": 5.857121902913008e-05,
+ "loss": 1.503,
+ "step": 1906
+ },
+ {
+ "epoch": 1.9075364946391171,
+ "grad_norm": 0.6140307784080505,
+ "learning_rate": 5.8475836143576433e-05,
+ "loss": 1.3118,
+ "step": 1907
+ },
+ {
+ "epoch": 1.908536775968241,
+ "grad_norm": 0.6074461340904236,
+ "learning_rate": 5.838049888556925e-05,
+ "loss": 1.2815,
+ "step": 1908
+ },
+ {
+ "epoch": 1.909537057297365,
+ "grad_norm": 0.6943862438201904,
+ "learning_rate": 5.8285207359866936e-05,
+ "loss": 1.2693,
+ "step": 1909
+ },
+ {
+ "epoch": 1.9105373386264888,
+ "grad_norm": 0.7455725073814392,
+ "learning_rate": 5.8189961671177574e-05,
+ "loss": 1.6509,
+ "step": 1910
+ },
+ {
+ "epoch": 1.9115376199556127,
+ "grad_norm": 0.645656943321228,
+ "learning_rate": 5.809476192415905e-05,
+ "loss": 1.3285,
+ "step": 1911
+ },
+ {
+ "epoch": 1.9125379012847363,
+ "grad_norm": 0.6280725002288818,
+ "learning_rate": 5.7999608223418534e-05,
+ "loss": 1.1409,
+ "step": 1912
+ },
+ {
+ "epoch": 1.91353818261386,
+ "grad_norm": 0.6084076762199402,
+ "learning_rate": 5.790450067351291e-05,
+ "loss": 1.4,
+ "step": 1913
+ },
+ {
+ "epoch": 1.9145384639429839,
+ "grad_norm": 0.5778687596321106,
+ "learning_rate": 5.780943937894805e-05,
+ "loss": 1.4359,
+ "step": 1914
+ },
+ {
+ "epoch": 1.9155387452721078,
+ "grad_norm": 0.5729363560676575,
+ "learning_rate": 5.771442444417918e-05,
+ "loss": 1.1936,
+ "step": 1915
+ },
+ {
+ "epoch": 1.9165390266012317,
+ "grad_norm": 0.5981405973434448,
+ "learning_rate": 5.761945597361054e-05,
+ "loss": 1.4219,
+ "step": 1916
+ },
+ {
+ "epoch": 1.9175393079303555,
+ "grad_norm": 0.6797769069671631,
+ "learning_rate": 5.752453407159522e-05,
+ "loss": 1.2791,
+ "step": 1917
+ },
+ {
+ "epoch": 1.9185395892594792,
+ "grad_norm": 0.6143385171890259,
+ "learning_rate": 5.742965884243532e-05,
+ "loss": 1.1946,
+ "step": 1918
+ },
+ {
+ "epoch": 1.919539870588603,
+ "grad_norm": 0.7437219023704529,
+ "learning_rate": 5.733483039038149e-05,
+ "loss": 1.4242,
+ "step": 1919
+ },
+ {
+ "epoch": 1.9205401519177268,
+ "grad_norm": 0.6434136033058167,
+ "learning_rate": 5.724004881963311e-05,
+ "loss": 1.3105,
+ "step": 1920
+ },
+ {
+ "epoch": 1.9215404332468506,
+ "grad_norm": 0.6449226140975952,
+ "learning_rate": 5.714531423433791e-05,
+ "loss": 1.3863,
+ "step": 1921
+ },
+ {
+ "epoch": 1.9225407145759745,
+ "grad_norm": 0.7470558881759644,
+ "learning_rate": 5.705062673859216e-05,
+ "loss": 1.3502,
+ "step": 1922
+ },
+ {
+ "epoch": 1.9235409959050984,
+ "grad_norm": 0.6595833897590637,
+ "learning_rate": 5.69559864364402e-05,
+ "loss": 1.4515,
+ "step": 1923
+ },
+ {
+ "epoch": 1.924541277234222,
+ "grad_norm": 0.6996669769287109,
+ "learning_rate": 5.6861393431874675e-05,
+ "loss": 1.5347,
+ "step": 1924
+ },
+ {
+ "epoch": 1.925541558563346,
+ "grad_norm": 0.6640759706497192,
+ "learning_rate": 5.676684782883615e-05,
+ "loss": 1.322,
+ "step": 1925
+ },
+ {
+ "epoch": 1.9265418398924696,
+ "grad_norm": 0.6044187545776367,
+ "learning_rate": 5.667234973121317e-05,
+ "loss": 1.3457,
+ "step": 1926
+ },
+ {
+ "epoch": 1.9275421212215935,
+ "grad_norm": 0.6154062747955322,
+ "learning_rate": 5.6577899242842025e-05,
+ "loss": 1.4132,
+ "step": 1927
+ },
+ {
+ "epoch": 1.9285424025507174,
+ "grad_norm": 0.730377733707428,
+ "learning_rate": 5.648349646750673e-05,
+ "loss": 1.4286,
+ "step": 1928
+ },
+ {
+ "epoch": 1.9295426838798413,
+ "grad_norm": 0.6812162399291992,
+ "learning_rate": 5.6389141508938903e-05,
+ "loss": 1.4494,
+ "step": 1929
+ },
+ {
+ "epoch": 1.9305429652089652,
+ "grad_norm": 0.6427568197250366,
+ "learning_rate": 5.629483447081751e-05,
+ "loss": 1.4093,
+ "step": 1930
+ },
+ {
+ "epoch": 1.9315432465380888,
+ "grad_norm": 0.672756016254425,
+ "learning_rate": 5.620057545676901e-05,
+ "loss": 1.3817,
+ "step": 1931
+ },
+ {
+ "epoch": 1.9325435278672125,
+ "grad_norm": 0.6241796016693115,
+ "learning_rate": 5.610636457036693e-05,
+ "loss": 1.3074,
+ "step": 1932
+ },
+ {
+ "epoch": 1.9335438091963364,
+ "grad_norm": 0.5853481888771057,
+ "learning_rate": 5.601220191513208e-05,
+ "loss": 1.3297,
+ "step": 1933
+ },
+ {
+ "epoch": 1.9345440905254603,
+ "grad_norm": 0.5953206419944763,
+ "learning_rate": 5.591808759453214e-05,
+ "loss": 1.4803,
+ "step": 1934
+ },
+ {
+ "epoch": 1.9355443718545842,
+ "grad_norm": 0.6578570008277893,
+ "learning_rate": 5.5824021711981686e-05,
+ "loss": 1.1907,
+ "step": 1935
+ },
+ {
+ "epoch": 1.936544653183708,
+ "grad_norm": 0.7336297035217285,
+ "learning_rate": 5.573000437084221e-05,
+ "loss": 1.3186,
+ "step": 1936
+ },
+ {
+ "epoch": 1.9375449345128317,
+ "grad_norm": 0.594570517539978,
+ "learning_rate": 5.563603567442168e-05,
+ "loss": 1.499,
+ "step": 1937
+ },
+ {
+ "epoch": 1.9385452158419556,
+ "grad_norm": 0.6666537523269653,
+ "learning_rate": 5.554211572597477e-05,
+ "loss": 1.4292,
+ "step": 1938
+ },
+ {
+ "epoch": 1.9395454971710793,
+ "grad_norm": 0.6429474353790283,
+ "learning_rate": 5.544824462870244e-05,
+ "loss": 1.5317,
+ "step": 1939
+ },
+ {
+ "epoch": 1.9405457785002032,
+ "grad_norm": 0.6204141974449158,
+ "learning_rate": 5.5354422485752125e-05,
+ "loss": 1.3496,
+ "step": 1940
+ },
+ {
+ "epoch": 1.941546059829327,
+ "grad_norm": 0.6017738580703735,
+ "learning_rate": 5.5260649400217326e-05,
+ "loss": 1.2879,
+ "step": 1941
+ },
+ {
+ "epoch": 1.942546341158451,
+ "grad_norm": 0.7276145219802856,
+ "learning_rate": 5.5166925475137735e-05,
+ "loss": 1.4061,
+ "step": 1942
+ },
+ {
+ "epoch": 1.9435466224875746,
+ "grad_norm": 0.5995808243751526,
+ "learning_rate": 5.507325081349903e-05,
+ "loss": 1.3676,
+ "step": 1943
+ },
+ {
+ "epoch": 1.9445469038166985,
+ "grad_norm": 0.7158801555633545,
+ "learning_rate": 5.497962551823266e-05,
+ "loss": 1.4506,
+ "step": 1944
+ },
+ {
+ "epoch": 1.9455471851458221,
+ "grad_norm": 0.7192680239677429,
+ "learning_rate": 5.488604969221597e-05,
+ "loss": 1.276,
+ "step": 1945
+ },
+ {
+ "epoch": 1.946547466474946,
+ "grad_norm": 0.5900127291679382,
+ "learning_rate": 5.479252343827178e-05,
+ "loss": 1.2548,
+ "step": 1946
+ },
+ {
+ "epoch": 1.94754774780407,
+ "grad_norm": 0.8271359801292419,
+ "learning_rate": 5.469904685916861e-05,
+ "loss": 1.2133,
+ "step": 1947
+ },
+ {
+ "epoch": 1.9485480291331938,
+ "grad_norm": 0.6850492358207703,
+ "learning_rate": 5.460562005762024e-05,
+ "loss": 1.4255,
+ "step": 1948
+ },
+ {
+ "epoch": 1.9495483104623177,
+ "grad_norm": 0.6316859722137451,
+ "learning_rate": 5.4512243136285915e-05,
+ "loss": 1.2897,
+ "step": 1949
+ },
+ {
+ "epoch": 1.9505485917914414,
+ "grad_norm": 0.6135215759277344,
+ "learning_rate": 5.441891619776987e-05,
+ "loss": 1.3193,
+ "step": 1950
+ },
+ {
+ "epoch": 1.951548873120565,
+ "grad_norm": 0.7005714178085327,
+ "learning_rate": 5.432563934462166e-05,
+ "loss": 1.4412,
+ "step": 1951
+ },
+ {
+ "epoch": 1.952549154449689,
+ "grad_norm": 0.6692869067192078,
+ "learning_rate": 5.423241267933557e-05,
+ "loss": 1.2523,
+ "step": 1952
+ },
+ {
+ "epoch": 1.9535494357788128,
+ "grad_norm": 0.5568224191665649,
+ "learning_rate": 5.4139236304350935e-05,
+ "loss": 1.3503,
+ "step": 1953
+ },
+ {
+ "epoch": 1.9545497171079367,
+ "grad_norm": 0.6143470406532288,
+ "learning_rate": 5.404611032205169e-05,
+ "loss": 1.4381,
+ "step": 1954
+ },
+ {
+ "epoch": 1.9555499984370606,
+ "grad_norm": 0.6193466782569885,
+ "learning_rate": 5.3953034834766416e-05,
+ "loss": 1.4821,
+ "step": 1955
+ },
+ {
+ "epoch": 1.9565502797661842,
+ "grad_norm": 0.6553237438201904,
+ "learning_rate": 5.386000994476832e-05,
+ "loss": 1.3022,
+ "step": 1956
+ },
+ {
+ "epoch": 1.957550561095308,
+ "grad_norm": 0.7644321918487549,
+ "learning_rate": 5.376703575427481e-05,
+ "loss": 1.444,
+ "step": 1957
+ },
+ {
+ "epoch": 1.9585508424244318,
+ "grad_norm": 0.720217227935791,
+ "learning_rate": 5.367411236544786e-05,
+ "loss": 1.46,
+ "step": 1958
+ },
+ {
+ "epoch": 1.9595511237535557,
+ "grad_norm": 0.6305975317955017,
+ "learning_rate": 5.3581239880393375e-05,
+ "loss": 1.5285,
+ "step": 1959
+ },
+ {
+ "epoch": 1.9605514050826796,
+ "grad_norm": 0.5860128998756409,
+ "learning_rate": 5.3488418401161475e-05,
+ "loss": 1.254,
+ "step": 1960
+ },
+ {
+ "epoch": 1.9615516864118034,
+ "grad_norm": 0.6627411842346191,
+ "learning_rate": 5.339564802974615e-05,
+ "loss": 1.2639,
+ "step": 1961
+ },
+ {
+ "epoch": 1.962551967740927,
+ "grad_norm": 0.614637017250061,
+ "learning_rate": 5.33029288680852e-05,
+ "loss": 1.3714,
+ "step": 1962
+ },
+ {
+ "epoch": 1.963552249070051,
+ "grad_norm": 0.6468274593353271,
+ "learning_rate": 5.321026101806032e-05,
+ "loss": 1.3802,
+ "step": 1963
+ },
+ {
+ "epoch": 1.9645525303991747,
+ "grad_norm": 0.6303175091743469,
+ "learning_rate": 5.311764458149664e-05,
+ "loss": 1.3505,
+ "step": 1964
+ },
+ {
+ "epoch": 1.9655528117282985,
+ "grad_norm": 0.6070892214775085,
+ "learning_rate": 5.302507966016295e-05,
+ "loss": 1.4039,
+ "step": 1965
+ },
+ {
+ "epoch": 1.9665530930574224,
+ "grad_norm": 0.6614121198654175,
+ "learning_rate": 5.293256635577126e-05,
+ "loss": 1.324,
+ "step": 1966
+ },
+ {
+ "epoch": 1.9675533743865463,
+ "grad_norm": 0.6081134676933289,
+ "learning_rate": 5.284010476997705e-05,
+ "loss": 1.2839,
+ "step": 1967
+ },
+ {
+ "epoch": 1.96855365571567,
+ "grad_norm": 0.707693338394165,
+ "learning_rate": 5.274769500437882e-05,
+ "loss": 1.4179,
+ "step": 1968
+ },
+ {
+ "epoch": 1.9695539370447939,
+ "grad_norm": 0.7113189697265625,
+ "learning_rate": 5.265533716051825e-05,
+ "loss": 1.1911,
+ "step": 1969
+ },
+ {
+ "epoch": 1.9705542183739175,
+ "grad_norm": 0.7755022048950195,
+ "learning_rate": 5.256303133987982e-05,
+ "loss": 1.3266,
+ "step": 1970
+ },
+ {
+ "epoch": 1.9715544997030414,
+ "grad_norm": 0.6285157203674316,
+ "learning_rate": 5.247077764389099e-05,
+ "loss": 1.2468,
+ "step": 1971
+ },
+ {
+ "epoch": 1.9725547810321653,
+ "grad_norm": 0.6110594272613525,
+ "learning_rate": 5.2378576173921934e-05,
+ "loss": 1.2845,
+ "step": 1972
+ },
+ {
+ "epoch": 1.9735550623612892,
+ "grad_norm": 0.7382394671440125,
+ "learning_rate": 5.22864270312853e-05,
+ "loss": 1.4531,
+ "step": 1973
+ },
+ {
+ "epoch": 1.974555343690413,
+ "grad_norm": 3.4922046661376953,
+ "learning_rate": 5.219433031723641e-05,
+ "loss": 1.174,
+ "step": 1974
+ },
+ {
+ "epoch": 1.9755556250195367,
+ "grad_norm": 0.5983368158340454,
+ "learning_rate": 5.210228613297281e-05,
+ "loss": 1.2157,
+ "step": 1975
+ },
+ {
+ "epoch": 1.9765559063486604,
+ "grad_norm": 0.6913344264030457,
+ "learning_rate": 5.201029457963451e-05,
+ "loss": 1.3994,
+ "step": 1976
+ },
+ {
+ "epoch": 1.9775561876777843,
+ "grad_norm": 0.5928197503089905,
+ "learning_rate": 5.191835575830352e-05,
+ "loss": 1.5831,
+ "step": 1977
+ },
+ {
+ "epoch": 1.9785564690069082,
+ "grad_norm": 0.6904213428497314,
+ "learning_rate": 5.1826469770004026e-05,
+ "loss": 1.3408,
+ "step": 1978
+ },
+ {
+ "epoch": 1.979556750336032,
+ "grad_norm": 0.6584596633911133,
+ "learning_rate": 5.1734636715702043e-05,
+ "loss": 1.3828,
+ "step": 1979
+ },
+ {
+ "epoch": 1.980557031665156,
+ "grad_norm": 0.7366130352020264,
+ "learning_rate": 5.1642856696305575e-05,
+ "loss": 1.5174,
+ "step": 1980
+ },
+ {
+ "epoch": 1.9815573129942796,
+ "grad_norm": 0.6994180679321289,
+ "learning_rate": 5.155112981266422e-05,
+ "loss": 1.6449,
+ "step": 1981
+ },
+ {
+ "epoch": 1.9825575943234035,
+ "grad_norm": 0.685383677482605,
+ "learning_rate": 5.145945616556921e-05,
+ "loss": 1.2966,
+ "step": 1982
+ },
+ {
+ "epoch": 1.9835578756525272,
+ "grad_norm": 0.6237232685089111,
+ "learning_rate": 5.136783585575336e-05,
+ "loss": 1.3721,
+ "step": 1983
+ },
+ {
+ "epoch": 1.984558156981651,
+ "grad_norm": 0.6665890216827393,
+ "learning_rate": 5.127626898389075e-05,
+ "loss": 1.3245,
+ "step": 1984
+ },
+ {
+ "epoch": 1.985558438310775,
+ "grad_norm": 0.607271134853363,
+ "learning_rate": 5.118475565059691e-05,
+ "loss": 1.1436,
+ "step": 1985
+ },
+ {
+ "epoch": 1.9865587196398988,
+ "grad_norm": 0.6433306336402893,
+ "learning_rate": 5.109329595642829e-05,
+ "loss": 1.521,
+ "step": 1986
+ },
+ {
+ "epoch": 1.9875590009690225,
+ "grad_norm": 0.6605822443962097,
+ "learning_rate": 5.1001890001882734e-05,
+ "loss": 1.3559,
+ "step": 1987
+ },
+ {
+ "epoch": 1.9885592822981464,
+ "grad_norm": 0.6377484202384949,
+ "learning_rate": 5.091053788739878e-05,
+ "loss": 1.3607,
+ "step": 1988
+ },
+ {
+ "epoch": 1.98955956362727,
+ "grad_norm": 0.6457136273384094,
+ "learning_rate": 5.081923971335582e-05,
+ "loss": 1.2594,
+ "step": 1989
+ },
+ {
+ "epoch": 1.990559844956394,
+ "grad_norm": 0.6249425411224365,
+ "learning_rate": 5.072799558007415e-05,
+ "loss": 1.4896,
+ "step": 1990
+ },
+ {
+ "epoch": 1.9915601262855178,
+ "grad_norm": 0.6000842452049255,
+ "learning_rate": 5.063680558781445e-05,
+ "loss": 1.1787,
+ "step": 1991
+ },
+ {
+ "epoch": 1.9925604076146417,
+ "grad_norm": 0.5997797250747681,
+ "learning_rate": 5.0545669836778144e-05,
+ "loss": 1.4464,
+ "step": 1992
+ },
+ {
+ "epoch": 1.9935606889437656,
+ "grad_norm": 0.665529191493988,
+ "learning_rate": 5.045458842710684e-05,
+ "loss": 1.5382,
+ "step": 1993
+ },
+ {
+ "epoch": 1.9945609702728893,
+ "grad_norm": 0.6204399466514587,
+ "learning_rate": 5.036356145888263e-05,
+ "loss": 1.218,
+ "step": 1994
+ },
+ {
+ "epoch": 1.995561251602013,
+ "grad_norm": 0.6627510786056519,
+ "learning_rate": 5.0272589032127594e-05,
+ "loss": 1.5239,
+ "step": 1995
+ },
+ {
+ "epoch": 1.9965615329311368,
+ "grad_norm": 0.708292543888092,
+ "learning_rate": 5.0181671246804064e-05,
+ "loss": 1.4107,
+ "step": 1996
+ },
+ {
+ "epoch": 1.9975618142602607,
+ "grad_norm": 0.5863770842552185,
+ "learning_rate": 5.009080820281415e-05,
+ "loss": 1.1877,
+ "step": 1997
+ },
+ {
+ "epoch": 1.9985620955893846,
+ "grad_norm": 0.5950385928153992,
+ "learning_rate": 5.000000000000002e-05,
+ "loss": 1.2165,
+ "step": 1998
+ },
+ {
+ "epoch": 1.9995623769185085,
+ "grad_norm": 0.6033083200454712,
+ "learning_rate": 4.990924673814336e-05,
+ "loss": 1.3102,
+ "step": 1999
+ },
+ {
+ "epoch": 2.0005626582476324,
+ "grad_norm": 0.5440614223480225,
+ "learning_rate": 4.981854851696568e-05,
+ "loss": 1.0143,
+ "step": 2000
+ }
+ ],
+ "logging_steps": 1,
+ "max_steps": 2997,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 3,
+ "save_steps": 500,
+ "stateful_callbacks": {
+ "TrainerControl": {
+ "args": {
+ "should_epoch_stop": false,
+ "should_evaluate": false,
+ "should_log": false,
+ "should_save": true,
+ "should_training_stop": false
+ },
+ "attributes": {}
+ }
+ },
+ "total_flos": 1.6185579427528704e+16,
+ "train_batch_size": 2,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/checkpoint-2000/training_args.bin b/checkpoint-2000/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..6831a6d623a8a2b84942bb5584c6aa5bc14eee51
--- /dev/null
+++ b/checkpoint-2000/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5db131d6e82df60077bab037ec35113e1b0836a0bd72bb0a21e3fc0311a527de
+size 5304
diff --git a/checkpoint-2500/config.json b/checkpoint-2500/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..49d4bd1e1961ef7daf9af9a7dbe539789d58d949
--- /dev/null
+++ b/checkpoint-2500/config.json
@@ -0,0 +1,35 @@
+{
+ "_name_or_path": "facebook/nllb-200-distilled-600M",
+ "activation_dropout": 0.0,
+ "activation_function": "relu",
+ "architectures": [
+ "M2M100ForConditionalGeneration"
+ ],
+ "attention_dropout": 0.1,
+ "bos_token_id": 0,
+ "d_model": 1024,
+ "decoder_attention_heads": 16,
+ "decoder_ffn_dim": 4096,
+ "decoder_layerdrop": 0,
+ "decoder_layers": 12,
+ "decoder_start_token_id": 2,
+ "dropout": 0.1,
+ "encoder_attention_heads": 16,
+ "encoder_ffn_dim": 4096,
+ "encoder_layerdrop": 0,
+ "encoder_layers": 12,
+ "eos_token_id": 2,
+ "init_std": 0.02,
+ "is_encoder_decoder": true,
+ "max_length": 200,
+ "max_position_embeddings": 1024,
+ "model_type": "m2m_100",
+ "num_hidden_layers": 12,
+ "pad_token_id": 1,
+ "scale_embedding": true,
+ "tokenizer_class": "NllbTokenizer",
+ "torch_dtype": "float32",
+ "transformers_version": "4.43.1",
+ "use_cache": true,
+ "vocab_size": 256206
+}
diff --git a/checkpoint-2500/generation_config.json b/checkpoint-2500/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..680d3e0504023804deeb427766576194a0f17d47
--- /dev/null
+++ b/checkpoint-2500/generation_config.json
@@ -0,0 +1,9 @@
+{
+ "_from_model_config": true,
+ "bos_token_id": 0,
+ "decoder_start_token_id": 2,
+ "eos_token_id": 2,
+ "max_length": 200,
+ "pad_token_id": 1,
+ "transformers_version": "4.43.1"
+}
diff --git a/checkpoint-2500/model.safetensors b/checkpoint-2500/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..94c17d7b6d3a1d371ed0d99d1d0f1691ac840b53
--- /dev/null
+++ b/checkpoint-2500/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a85682c60042e446f2d406505837a5e6b6466410aca52d0ed14d68f189763233
+size 2460354912
diff --git a/checkpoint-2500/optimizer.pt b/checkpoint-2500/optimizer.pt
new file mode 100644
index 0000000000000000000000000000000000000000..2817a38494efc098c93292397a739fb0702e594b
--- /dev/null
+++ b/checkpoint-2500/optimizer.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:080e068b2eea50c217619afa8aaa259a1b58851b56022a0a75677410ed6ab01b
+size 5125261
diff --git a/checkpoint-2500/rng_state.pth b/checkpoint-2500/rng_state.pth
new file mode 100644
index 0000000000000000000000000000000000000000..52521a3c0a59cbbf1a08fe2c61903680eb711158
--- /dev/null
+++ b/checkpoint-2500/rng_state.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:be94d73868c4570570ec06e62e12d79a131c26bfd47bc00de07ba8b9b32b46aa
+size 14244
diff --git a/checkpoint-2500/scheduler.pt b/checkpoint-2500/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..dd8e27122a81b9d9d7dc83f626b747d6b7fcdfbe
--- /dev/null
+++ b/checkpoint-2500/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:08e9362e46f924529c72e8a4fcf95c0e125b02b2be6e993db23bcdc91ebaad7c
+size 1064
diff --git a/checkpoint-2500/sentencepiece.bpe.model b/checkpoint-2500/sentencepiece.bpe.model
new file mode 100644
index 0000000000000000000000000000000000000000..dc2262d3e1d375b235eb71c24119c8e73f85d4ad
--- /dev/null
+++ b/checkpoint-2500/sentencepiece.bpe.model
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:14bb8dfb35c0ffdea7bc01e56cea38b9e3d5efcdcb9c251d6b40538e1aab555a
+size 4852054
diff --git a/checkpoint-2500/special_tokens_map.json b/checkpoint-2500/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..770c6f4e25faf27bbc3878b806f2ecfb88c5169e
--- /dev/null
+++ b/checkpoint-2500/special_tokens_map.json
@@ -0,0 +1,255 @@
+{
+ "additional_special_tokens": [
+ "ace_Arab",
+ "ace_Latn",
+ "acm_Arab",
+ "acq_Arab",
+ "aeb_Arab",
+ "afr_Latn",
+ "ajp_Arab",
+ "aka_Latn",
+ "amh_Ethi",
+ "apc_Arab",
+ "arb_Arab",
+ "ars_Arab",
+ "ary_Arab",
+ "arz_Arab",
+ "asm_Beng",
+ "ast_Latn",
+ "awa_Deva",
+ "ayr_Latn",
+ "azb_Arab",
+ "azj_Latn",
+ "bak_Cyrl",
+ "bam_Latn",
+ "ban_Latn",
+ "bel_Cyrl",
+ "bem_Latn",
+ "ben_Beng",
+ "bho_Deva",
+ "bjn_Arab",
+ "bjn_Latn",
+ "bod_Tibt",
+ "bos_Latn",
+ "bug_Latn",
+ "bul_Cyrl",
+ "cat_Latn",
+ "ceb_Latn",
+ "ces_Latn",
+ "cjk_Latn",
+ "ckb_Arab",
+ "crh_Latn",
+ "cym_Latn",
+ "dan_Latn",
+ "deu_Latn",
+ "dik_Latn",
+ "dyu_Latn",
+ "dzo_Tibt",
+ "ell_Grek",
+ "eng_Latn",
+ "epo_Latn",
+ "est_Latn",
+ "eus_Latn",
+ "ewe_Latn",
+ "fao_Latn",
+ "pes_Arab",
+ "fij_Latn",
+ "fin_Latn",
+ "fon_Latn",
+ "fra_Latn",
+ "fur_Latn",
+ "fuv_Latn",
+ "gla_Latn",
+ "gle_Latn",
+ "glg_Latn",
+ "grn_Latn",
+ "guj_Gujr",
+ "hat_Latn",
+ "hau_Latn",
+ "heb_Hebr",
+ "hin_Deva",
+ "hne_Deva",
+ "hrv_Latn",
+ "hun_Latn",
+ "hye_Armn",
+ "ibo_Latn",
+ "ilo_Latn",
+ "ind_Latn",
+ "isl_Latn",
+ "ita_Latn",
+ "jav_Latn",
+ "jpn_Jpan",
+ "kab_Latn",
+ "kac_Latn",
+ "kam_Latn",
+ "kan_Knda",
+ "kas_Arab",
+ "kas_Deva",
+ "kat_Geor",
+ "knc_Arab",
+ "knc_Latn",
+ "kaz_Cyrl",
+ "kbp_Latn",
+ "kea_Latn",
+ "khm_Khmr",
+ "kik_Latn",
+ "kin_Latn",
+ "kir_Cyrl",
+ "kmb_Latn",
+ "kon_Latn",
+ "kor_Hang",
+ "kmr_Latn",
+ "lao_Laoo",
+ "lvs_Latn",
+ "lij_Latn",
+ "lim_Latn",
+ "lin_Latn",
+ "lit_Latn",
+ "lmo_Latn",
+ "ltg_Latn",
+ "ltz_Latn",
+ "lua_Latn",
+ "lug_Latn",
+ "luo_Latn",
+ "lus_Latn",
+ "mag_Deva",
+ "mai_Deva",
+ "mal_Mlym",
+ "mar_Deva",
+ "min_Latn",
+ "mkd_Cyrl",
+ "plt_Latn",
+ "mlt_Latn",
+ "mni_Beng",
+ "khk_Cyrl",
+ "mos_Latn",
+ "mri_Latn",
+ "zsm_Latn",
+ "mya_Mymr",
+ "nld_Latn",
+ "nno_Latn",
+ "nob_Latn",
+ "npi_Deva",
+ "nso_Latn",
+ "nus_Latn",
+ "nya_Latn",
+ "oci_Latn",
+ "gaz_Latn",
+ "ory_Orya",
+ "pag_Latn",
+ "pan_Guru",
+ "pap_Latn",
+ "pol_Latn",
+ "por_Latn",
+ "prs_Arab",
+ "pbt_Arab",
+ "quy_Latn",
+ "ron_Latn",
+ "run_Latn",
+ "rus_Cyrl",
+ "sag_Latn",
+ "san_Deva",
+ "sat_Beng",
+ "scn_Latn",
+ "shn_Mymr",
+ "sin_Sinh",
+ "slk_Latn",
+ "slv_Latn",
+ "smo_Latn",
+ "sna_Latn",
+ "snd_Arab",
+ "som_Latn",
+ "sot_Latn",
+ "spa_Latn",
+ "als_Latn",
+ "srd_Latn",
+ "srp_Cyrl",
+ "ssw_Latn",
+ "sun_Latn",
+ "swe_Latn",
+ "swh_Latn",
+ "szl_Latn",
+ "tam_Taml",
+ "tat_Cyrl",
+ "tel_Telu",
+ "tgk_Cyrl",
+ "tgl_Latn",
+ "tha_Thai",
+ "tir_Ethi",
+ "taq_Latn",
+ "taq_Tfng",
+ "tpi_Latn",
+ "tsn_Latn",
+ "tso_Latn",
+ "tuk_Latn",
+ "tum_Latn",
+ "tur_Latn",
+ "twi_Latn",
+ "tzm_Tfng",
+ "uig_Arab",
+ "ukr_Cyrl",
+ "umb_Latn",
+ "urd_Arab",
+ "uzn_Latn",
+ "vec_Latn",
+ "vie_Latn",
+ "war_Latn",
+ "wol_Latn",
+ "xho_Latn",
+ "ydd_Hebr",
+ "yor_Latn",
+ "yue_Hant",
+ "zho_Hans",
+ "zho_Hant",
+ "zul_Latn"
+ ],
+ "bos_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "cls_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "mask_token": {
+ "content": "",
+ "lstrip": true,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "sep_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/checkpoint-2500/tokenizer.json b/checkpoint-2500/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..98050e98b98364c06d83b3f41864076220cb8408
--- /dev/null
+++ b/checkpoint-2500/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3b39b25b0763a1dd69dec54081fafcf10770d9f2538a3bd975a0c4be6d60a9c2
+size 17331294
diff --git a/checkpoint-2500/tokenizer_config.json b/checkpoint-2500/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f1424d3657c008568198b44be241646482e7e9f2
--- /dev/null
+++ b/checkpoint-2500/tokenizer_config.json
@@ -0,0 +1,1878 @@
+{
+ "added_tokens_decoder": {
+ "0": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "3": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256001": {
+ "content": "ace_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256002": {
+ "content": "ace_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256003": {
+ "content": "acm_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256004": {
+ "content": "acq_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256005": {
+ "content": "aeb_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256006": {
+ "content": "afr_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256007": {
+ "content": "ajp_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256008": {
+ "content": "aka_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256009": {
+ "content": "amh_Ethi",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256010": {
+ "content": "apc_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256011": {
+ "content": "arb_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256012": {
+ "content": "ars_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256013": {
+ "content": "ary_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256014": {
+ "content": "arz_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256015": {
+ "content": "asm_Beng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256016": {
+ "content": "ast_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256017": {
+ "content": "awa_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256018": {
+ "content": "ayr_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256019": {
+ "content": "azb_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256020": {
+ "content": "azj_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256021": {
+ "content": "bak_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256022": {
+ "content": "bam_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256023": {
+ "content": "ban_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256024": {
+ "content": "bel_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256025": {
+ "content": "bem_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256026": {
+ "content": "ben_Beng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256027": {
+ "content": "bho_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256028": {
+ "content": "bjn_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256029": {
+ "content": "bjn_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256030": {
+ "content": "bod_Tibt",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256031": {
+ "content": "bos_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256032": {
+ "content": "bug_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256033": {
+ "content": "bul_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256034": {
+ "content": "cat_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256035": {
+ "content": "ceb_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256036": {
+ "content": "ces_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256037": {
+ "content": "cjk_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256038": {
+ "content": "ckb_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256039": {
+ "content": "crh_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256040": {
+ "content": "cym_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256041": {
+ "content": "dan_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256042": {
+ "content": "deu_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256043": {
+ "content": "dik_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256044": {
+ "content": "dyu_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256045": {
+ "content": "dzo_Tibt",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256046": {
+ "content": "ell_Grek",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256047": {
+ "content": "eng_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256048": {
+ "content": "epo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256049": {
+ "content": "est_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256050": {
+ "content": "eus_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256051": {
+ "content": "ewe_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256052": {
+ "content": "fao_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256053": {
+ "content": "pes_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256054": {
+ "content": "fij_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256055": {
+ "content": "fin_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256056": {
+ "content": "fon_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256057": {
+ "content": "fra_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256058": {
+ "content": "fur_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256059": {
+ "content": "fuv_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256060": {
+ "content": "gla_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256061": {
+ "content": "gle_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256062": {
+ "content": "glg_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256063": {
+ "content": "grn_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256064": {
+ "content": "guj_Gujr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256065": {
+ "content": "hat_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256066": {
+ "content": "hau_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256067": {
+ "content": "heb_Hebr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256068": {
+ "content": "hin_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256069": {
+ "content": "hne_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256070": {
+ "content": "hrv_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256071": {
+ "content": "hun_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256072": {
+ "content": "hye_Armn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256073": {
+ "content": "ibo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256074": {
+ "content": "ilo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256075": {
+ "content": "ind_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256076": {
+ "content": "isl_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256077": {
+ "content": "ita_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256078": {
+ "content": "jav_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256079": {
+ "content": "jpn_Jpan",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256080": {
+ "content": "kab_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256081": {
+ "content": "kac_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256082": {
+ "content": "kam_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256083": {
+ "content": "kan_Knda",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256084": {
+ "content": "kas_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256085": {
+ "content": "kas_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256086": {
+ "content": "kat_Geor",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256087": {
+ "content": "knc_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256088": {
+ "content": "knc_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256089": {
+ "content": "kaz_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256090": {
+ "content": "kbp_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256091": {
+ "content": "kea_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256092": {
+ "content": "khm_Khmr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256093": {
+ "content": "kik_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256094": {
+ "content": "kin_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256095": {
+ "content": "kir_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256096": {
+ "content": "kmb_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256097": {
+ "content": "kon_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256098": {
+ "content": "kor_Hang",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256099": {
+ "content": "kmr_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256100": {
+ "content": "lao_Laoo",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256101": {
+ "content": "lvs_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256102": {
+ "content": "lij_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256103": {
+ "content": "lim_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256104": {
+ "content": "lin_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256105": {
+ "content": "lit_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256106": {
+ "content": "lmo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256107": {
+ "content": "ltg_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256108": {
+ "content": "ltz_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256109": {
+ "content": "lua_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256110": {
+ "content": "lug_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256111": {
+ "content": "luo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256112": {
+ "content": "lus_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256113": {
+ "content": "mag_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256114": {
+ "content": "mai_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256115": {
+ "content": "mal_Mlym",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256116": {
+ "content": "mar_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256117": {
+ "content": "min_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256118": {
+ "content": "mkd_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256119": {
+ "content": "plt_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256120": {
+ "content": "mlt_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256121": {
+ "content": "mni_Beng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256122": {
+ "content": "khk_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256123": {
+ "content": "mos_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256124": {
+ "content": "mri_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256125": {
+ "content": "zsm_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256126": {
+ "content": "mya_Mymr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256127": {
+ "content": "nld_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256128": {
+ "content": "nno_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256129": {
+ "content": "nob_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256130": {
+ "content": "npi_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256131": {
+ "content": "nso_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256132": {
+ "content": "nus_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256133": {
+ "content": "nya_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256134": {
+ "content": "oci_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256135": {
+ "content": "gaz_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256136": {
+ "content": "ory_Orya",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256137": {
+ "content": "pag_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256138": {
+ "content": "pan_Guru",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256139": {
+ "content": "pap_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256140": {
+ "content": "pol_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256141": {
+ "content": "por_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256142": {
+ "content": "prs_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256143": {
+ "content": "pbt_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256144": {
+ "content": "quy_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256145": {
+ "content": "ron_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256146": {
+ "content": "run_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256147": {
+ "content": "rus_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256148": {
+ "content": "sag_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256149": {
+ "content": "san_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256150": {
+ "content": "sat_Beng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256151": {
+ "content": "scn_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256152": {
+ "content": "shn_Mymr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256153": {
+ "content": "sin_Sinh",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256154": {
+ "content": "slk_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256155": {
+ "content": "slv_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256156": {
+ "content": "smo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256157": {
+ "content": "sna_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256158": {
+ "content": "snd_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256159": {
+ "content": "som_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256160": {
+ "content": "sot_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256161": {
+ "content": "spa_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256162": {
+ "content": "als_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256163": {
+ "content": "srd_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256164": {
+ "content": "srp_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256165": {
+ "content": "ssw_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256166": {
+ "content": "sun_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256167": {
+ "content": "swe_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256168": {
+ "content": "swh_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256169": {
+ "content": "szl_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256170": {
+ "content": "tam_Taml",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256171": {
+ "content": "tat_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256172": {
+ "content": "tel_Telu",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256173": {
+ "content": "tgk_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256174": {
+ "content": "tgl_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256175": {
+ "content": "tha_Thai",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256176": {
+ "content": "tir_Ethi",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256177": {
+ "content": "taq_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256178": {
+ "content": "taq_Tfng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256179": {
+ "content": "tpi_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256180": {
+ "content": "tsn_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256181": {
+ "content": "tso_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256182": {
+ "content": "tuk_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256183": {
+ "content": "tum_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256184": {
+ "content": "tur_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256185": {
+ "content": "twi_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256186": {
+ "content": "tzm_Tfng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256187": {
+ "content": "uig_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256188": {
+ "content": "ukr_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256189": {
+ "content": "umb_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256190": {
+ "content": "urd_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256191": {
+ "content": "uzn_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256192": {
+ "content": "vec_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256193": {
+ "content": "vie_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256194": {
+ "content": "war_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256195": {
+ "content": "wol_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256196": {
+ "content": "xho_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256197": {
+ "content": "ydd_Hebr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256198": {
+ "content": "yor_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256199": {
+ "content": "yue_Hant",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256200": {
+ "content": "zho_Hans",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256201": {
+ "content": "zho_Hant",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256202": {
+ "content": "zul_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256203": {
+ "content": "<mask>",
+ "lstrip": true,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "additional_special_tokens": [
+ "ace_Arab",
+ "ace_Latn",
+ "acm_Arab",
+ "acq_Arab",
+ "aeb_Arab",
+ "afr_Latn",
+ "ajp_Arab",
+ "aka_Latn",
+ "amh_Ethi",
+ "apc_Arab",
+ "arb_Arab",
+ "ars_Arab",
+ "ary_Arab",
+ "arz_Arab",
+ "asm_Beng",
+ "ast_Latn",
+ "awa_Deva",
+ "ayr_Latn",
+ "azb_Arab",
+ "azj_Latn",
+ "bak_Cyrl",
+ "bam_Latn",
+ "ban_Latn",
+ "bel_Cyrl",
+ "bem_Latn",
+ "ben_Beng",
+ "bho_Deva",
+ "bjn_Arab",
+ "bjn_Latn",
+ "bod_Tibt",
+ "bos_Latn",
+ "bug_Latn",
+ "bul_Cyrl",
+ "cat_Latn",
+ "ceb_Latn",
+ "ces_Latn",
+ "cjk_Latn",
+ "ckb_Arab",
+ "crh_Latn",
+ "cym_Latn",
+ "dan_Latn",
+ "deu_Latn",
+ "dik_Latn",
+ "dyu_Latn",
+ "dzo_Tibt",
+ "ell_Grek",
+ "eng_Latn",
+ "epo_Latn",
+ "est_Latn",
+ "eus_Latn",
+ "ewe_Latn",
+ "fao_Latn",
+ "pes_Arab",
+ "fij_Latn",
+ "fin_Latn",
+ "fon_Latn",
+ "fra_Latn",
+ "fur_Latn",
+ "fuv_Latn",
+ "gla_Latn",
+ "gle_Latn",
+ "glg_Latn",
+ "grn_Latn",
+ "guj_Gujr",
+ "hat_Latn",
+ "hau_Latn",
+ "heb_Hebr",
+ "hin_Deva",
+ "hne_Deva",
+ "hrv_Latn",
+ "hun_Latn",
+ "hye_Armn",
+ "ibo_Latn",
+ "ilo_Latn",
+ "ind_Latn",
+ "isl_Latn",
+ "ita_Latn",
+ "jav_Latn",
+ "jpn_Jpan",
+ "kab_Latn",
+ "kac_Latn",
+ "kam_Latn",
+ "kan_Knda",
+ "kas_Arab",
+ "kas_Deva",
+ "kat_Geor",
+ "knc_Arab",
+ "knc_Latn",
+ "kaz_Cyrl",
+ "kbp_Latn",
+ "kea_Latn",
+ "khm_Khmr",
+ "kik_Latn",
+ "kin_Latn",
+ "kir_Cyrl",
+ "kmb_Latn",
+ "kon_Latn",
+ "kor_Hang",
+ "kmr_Latn",
+ "lao_Laoo",
+ "lvs_Latn",
+ "lij_Latn",
+ "lim_Latn",
+ "lin_Latn",
+ "lit_Latn",
+ "lmo_Latn",
+ "ltg_Latn",
+ "ltz_Latn",
+ "lua_Latn",
+ "lug_Latn",
+ "luo_Latn",
+ "lus_Latn",
+ "mag_Deva",
+ "mai_Deva",
+ "mal_Mlym",
+ "mar_Deva",
+ "min_Latn",
+ "mkd_Cyrl",
+ "plt_Latn",
+ "mlt_Latn",
+ "mni_Beng",
+ "khk_Cyrl",
+ "mos_Latn",
+ "mri_Latn",
+ "zsm_Latn",
+ "mya_Mymr",
+ "nld_Latn",
+ "nno_Latn",
+ "nob_Latn",
+ "npi_Deva",
+ "nso_Latn",
+ "nus_Latn",
+ "nya_Latn",
+ "oci_Latn",
+ "gaz_Latn",
+ "ory_Orya",
+ "pag_Latn",
+ "pan_Guru",
+ "pap_Latn",
+ "pol_Latn",
+ "por_Latn",
+ "prs_Arab",
+ "pbt_Arab",
+ "quy_Latn",
+ "ron_Latn",
+ "run_Latn",
+ "rus_Cyrl",
+ "sag_Latn",
+ "san_Deva",
+ "sat_Beng",
+ "scn_Latn",
+ "shn_Mymr",
+ "sin_Sinh",
+ "slk_Latn",
+ "slv_Latn",
+ "smo_Latn",
+ "sna_Latn",
+ "snd_Arab",
+ "som_Latn",
+ "sot_Latn",
+ "spa_Latn",
+ "als_Latn",
+ "srd_Latn",
+ "srp_Cyrl",
+ "ssw_Latn",
+ "sun_Latn",
+ "swe_Latn",
+ "swh_Latn",
+ "szl_Latn",
+ "tam_Taml",
+ "tat_Cyrl",
+ "tel_Telu",
+ "tgk_Cyrl",
+ "tgl_Latn",
+ "tha_Thai",
+ "tir_Ethi",
+ "taq_Latn",
+ "taq_Tfng",
+ "tpi_Latn",
+ "tsn_Latn",
+ "tso_Latn",
+ "tuk_Latn",
+ "tum_Latn",
+ "tur_Latn",
+ "twi_Latn",
+ "tzm_Tfng",
+ "uig_Arab",
+ "ukr_Cyrl",
+ "umb_Latn",
+ "urd_Arab",
+ "uzn_Latn",
+ "vec_Latn",
+ "vie_Latn",
+ "war_Latn",
+ "wol_Latn",
+ "xho_Latn",
+ "ydd_Hebr",
+ "yor_Latn",
+ "yue_Hant",
+ "zho_Hans",
+ "zho_Hant",
+ "zul_Latn"
+ ],
+ "bos_token": "<s>",
+ "clean_up_tokenization_spaces": true,
+ "cls_token": "<s>",
+ "eos_token": "</s>",
+ "legacy_behaviour": false,
+ "mask_token": "<mask>",
+ "model_max_length": 1024,
+ "pad_token": "<pad>",
+ "sep_token": "</s>",
+ "sp_model_kwargs": {},
+ "src_lang": "eng_Latn",
+ "tgt_lang": null,
+ "tokenizer_class": "NllbTokenizer",
+ "unk_token": "<unk>"
+}
diff --git a/checkpoint-2500/trainer_state.json b/checkpoint-2500/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..168e64660229197b703a07cec5a2b58fd92c0273
--- /dev/null
+++ b/checkpoint-2500/trainer_state.json
@@ -0,0 +1,17533 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 2.50070332280954,
+ "eval_steps": 500,
+ "global_step": 2500,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.001000281329123816,
+ "grad_norm": 7.503077030181885,
+ "learning_rate": 0.0001999999450590425,
+ "loss": 3.6513,
+ "step": 1
+ },
+ {
+ "epoch": 0.002000562658247632,
+ "grad_norm": 3.1191189289093018,
+ "learning_rate": 0.00019999978023623033,
+ "loss": 2.8683,
+ "step": 2
+ },
+ {
+ "epoch": 0.003000843987371448,
+ "grad_norm": 1.9282511472702026,
+ "learning_rate": 0.0001999995055317446,
+ "loss": 2.7882,
+ "step": 3
+ },
+ {
+ "epoch": 0.004001125316495264,
+ "grad_norm": 1.726026177406311,
+ "learning_rate": 0.00019999912094588717,
+ "loss": 2.7716,
+ "step": 4
+ },
+ {
+ "epoch": 0.005001406645619081,
+ "grad_norm": 1.4632996320724487,
+ "learning_rate": 0.00019999862647908064,
+ "loss": 2.6869,
+ "step": 5
+ },
+ {
+ "epoch": 0.006001687974742896,
+ "grad_norm": 1.5544543266296387,
+ "learning_rate": 0.00019999802213186834,
+ "loss": 2.6952,
+ "step": 6
+ },
+ {
+ "epoch": 0.007001969303866712,
+ "grad_norm": 1.5888980627059937,
+ "learning_rate": 0.0001999973079049143,
+ "loss": 2.1237,
+ "step": 7
+ },
+ {
+ "epoch": 0.008002250632990529,
+ "grad_norm": 1.8750641345977783,
+ "learning_rate": 0.00019999648379900338,
+ "loss": 2.3376,
+ "step": 8
+ },
+ {
+ "epoch": 0.009002531962114344,
+ "grad_norm": 1.0540648698806763,
+ "learning_rate": 0.0001999955498150411,
+ "loss": 2.4896,
+ "step": 9
+ },
+ {
+ "epoch": 0.010002813291238161,
+ "grad_norm": 1.0269274711608887,
+ "learning_rate": 0.00019999450595405374,
+ "loss": 2.1365,
+ "step": 10
+ },
+ {
+ "epoch": 0.011003094620361977,
+ "grad_norm": 1.0851730108261108,
+ "learning_rate": 0.0001999933522171883,
+ "loss": 2.235,
+ "step": 11
+ },
+ {
+ "epoch": 0.012003375949485792,
+ "grad_norm": 0.927042543888092,
+ "learning_rate": 0.00019999208860571255,
+ "loss": 2.2438,
+ "step": 12
+ },
+ {
+ "epoch": 0.01300365727860961,
+ "grad_norm": 1.3729208707809448,
+ "learning_rate": 0.00019999071512101496,
+ "loss": 2.0845,
+ "step": 13
+ },
+ {
+ "epoch": 0.014003938607733425,
+ "grad_norm": 1.1325910091400146,
+ "learning_rate": 0.00019998923176460474,
+ "loss": 2.0668,
+ "step": 14
+ },
+ {
+ "epoch": 0.01500421993685724,
+ "grad_norm": 0.9290457367897034,
+ "learning_rate": 0.00019998763853811184,
+ "loss": 2.0227,
+ "step": 15
+ },
+ {
+ "epoch": 0.016004501265981057,
+ "grad_norm": 0.942140519618988,
+ "learning_rate": 0.00019998593544328692,
+ "loss": 2.1598,
+ "step": 16
+ },
+ {
+ "epoch": 0.017004782595104875,
+ "grad_norm": 1.096635103225708,
+ "learning_rate": 0.00019998412248200138,
+ "loss": 2.1897,
+ "step": 17
+ },
+ {
+ "epoch": 0.01800506392422869,
+ "grad_norm": 1.1107186079025269,
+ "learning_rate": 0.00019998219965624734,
+ "loss": 2.0546,
+ "step": 18
+ },
+ {
+ "epoch": 0.019005345253352506,
+ "grad_norm": 0.9696593880653381,
+ "learning_rate": 0.0001999801669681376,
+ "loss": 2.0317,
+ "step": 19
+ },
+ {
+ "epoch": 0.020005626582476323,
+ "grad_norm": 0.9394300580024719,
+ "learning_rate": 0.00019997802441990573,
+ "loss": 2.2883,
+ "step": 20
+ },
+ {
+ "epoch": 0.021005907911600136,
+ "grad_norm": 1.08865225315094,
+ "learning_rate": 0.00019997577201390606,
+ "loss": 1.9838,
+ "step": 21
+ },
+ {
+ "epoch": 0.022006189240723954,
+ "grad_norm": 1.0712405443191528,
+ "learning_rate": 0.00019997340975261353,
+ "loss": 2.1177,
+ "step": 22
+ },
+ {
+ "epoch": 0.02300647056984777,
+ "grad_norm": 1.3190314769744873,
+ "learning_rate": 0.00019997093763862383,
+ "loss": 1.9755,
+ "step": 23
+ },
+ {
+ "epoch": 0.024006751898971584,
+ "grad_norm": 1.0659812688827515,
+ "learning_rate": 0.0001999683556746534,
+ "loss": 1.9829,
+ "step": 24
+ },
+ {
+ "epoch": 0.0250070332280954,
+ "grad_norm": 1.1824345588684082,
+ "learning_rate": 0.0001999656638635393,
+ "loss": 2.4219,
+ "step": 25
+ },
+ {
+ "epoch": 0.02600731455721922,
+ "grad_norm": 1.3446214199066162,
+ "learning_rate": 0.0001999628622082394,
+ "loss": 1.9644,
+ "step": 26
+ },
+ {
+ "epoch": 0.027007595886343033,
+ "grad_norm": 1.2527475357055664,
+ "learning_rate": 0.0001999599507118322,
+ "loss": 2.1889,
+ "step": 27
+ },
+ {
+ "epoch": 0.02800787721546685,
+ "grad_norm": 1.4738999605178833,
+ "learning_rate": 0.00019995692937751683,
+ "loss": 2.1949,
+ "step": 28
+ },
+ {
+ "epoch": 0.029008158544590667,
+ "grad_norm": 1.0533576011657715,
+ "learning_rate": 0.0001999537982086133,
+ "loss": 2.1034,
+ "step": 29
+ },
+ {
+ "epoch": 0.03000843987371448,
+ "grad_norm": 1.0343223810195923,
+ "learning_rate": 0.00019995055720856218,
+ "loss": 1.9561,
+ "step": 30
+ },
+ {
+ "epoch": 0.031008721202838298,
+ "grad_norm": 1.1149976253509521,
+ "learning_rate": 0.00019994720638092468,
+ "loss": 2.0981,
+ "step": 31
+ },
+ {
+ "epoch": 0.032009002531962115,
+ "grad_norm": 1.197178840637207,
+ "learning_rate": 0.00019994374572938277,
+ "loss": 2.1587,
+ "step": 32
+ },
+ {
+ "epoch": 0.03300928386108593,
+ "grad_norm": 0.9382303953170776,
+ "learning_rate": 0.00019994017525773913,
+ "loss": 1.869,
+ "step": 33
+ },
+ {
+ "epoch": 0.03400956519020975,
+ "grad_norm": 1.0526461601257324,
+ "learning_rate": 0.00019993649496991705,
+ "loss": 1.9045,
+ "step": 34
+ },
+ {
+ "epoch": 0.03500984651933356,
+ "grad_norm": 0.8510498404502869,
+ "learning_rate": 0.00019993270486996046,
+ "loss": 2.1005,
+ "step": 35
+ },
+ {
+ "epoch": 0.03601012784845738,
+ "grad_norm": 0.9990401268005371,
+ "learning_rate": 0.000199928804962034,
+ "loss": 1.8569,
+ "step": 36
+ },
+ {
+ "epoch": 0.037010409177581194,
+ "grad_norm": 0.9243854284286499,
+ "learning_rate": 0.00019992479525042303,
+ "loss": 1.9666,
+ "step": 37
+ },
+ {
+ "epoch": 0.03801069050670501,
+ "grad_norm": 0.7774227261543274,
+ "learning_rate": 0.00019992067573953342,
+ "loss": 2.0376,
+ "step": 38
+ },
+ {
+ "epoch": 0.03901097183582883,
+ "grad_norm": 0.8114833235740662,
+ "learning_rate": 0.0001999164464338918,
+ "loss": 2.1608,
+ "step": 39
+ },
+ {
+ "epoch": 0.040011253164952645,
+ "grad_norm": 0.8716320395469666,
+ "learning_rate": 0.0001999121073381454,
+ "loss": 2.0743,
+ "step": 40
+ },
+ {
+ "epoch": 0.041011534494076456,
+ "grad_norm": 0.9571239948272705,
+ "learning_rate": 0.0001999076584570621,
+ "loss": 2.0128,
+ "step": 41
+ },
+ {
+ "epoch": 0.04201181582320027,
+ "grad_norm": 1.038691520690918,
+ "learning_rate": 0.00019990309979553045,
+ "loss": 1.976,
+ "step": 42
+ },
+ {
+ "epoch": 0.04301209715232409,
+ "grad_norm": 1.0576292276382446,
+ "learning_rate": 0.00019989843135855958,
+ "loss": 1.94,
+ "step": 43
+ },
+ {
+ "epoch": 0.04401237848144791,
+ "grad_norm": 1.0991204977035522,
+ "learning_rate": 0.00019989365315127922,
+ "loss": 1.9397,
+ "step": 44
+ },
+ {
+ "epoch": 0.045012659810571724,
+ "grad_norm": 0.9268686175346375,
+ "learning_rate": 0.0001998887651789398,
+ "loss": 1.9305,
+ "step": 45
+ },
+ {
+ "epoch": 0.04601294113969554,
+ "grad_norm": 0.8459104299545288,
+ "learning_rate": 0.0001998837674469123,
+ "loss": 1.7941,
+ "step": 46
+ },
+ {
+ "epoch": 0.04701322246881936,
+ "grad_norm": 0.9260527491569519,
+ "learning_rate": 0.00019987865996068833,
+ "loss": 1.8843,
+ "step": 47
+ },
+ {
+ "epoch": 0.04801350379794317,
+ "grad_norm": 0.8370497226715088,
+ "learning_rate": 0.00019987344272588006,
+ "loss": 1.8779,
+ "step": 48
+ },
+ {
+ "epoch": 0.049013785127066986,
+ "grad_norm": 0.9228008389472961,
+ "learning_rate": 0.00019986811574822033,
+ "loss": 2.1713,
+ "step": 49
+ },
+ {
+ "epoch": 0.0500140664561908,
+ "grad_norm": 1.013746738433838,
+ "learning_rate": 0.00019986267903356254,
+ "loss": 2.1443,
+ "step": 50
+ },
+ {
+ "epoch": 0.05101434778531462,
+ "grad_norm": 1.0155737400054932,
+ "learning_rate": 0.0001998571325878806,
+ "loss": 1.9679,
+ "step": 51
+ },
+ {
+ "epoch": 0.05201462911443844,
+ "grad_norm": 0.9591345191001892,
+ "learning_rate": 0.0001998514764172691,
+ "loss": 2.0611,
+ "step": 52
+ },
+ {
+ "epoch": 0.053014910443562255,
+ "grad_norm": 0.9030050039291382,
+ "learning_rate": 0.00019984571052794313,
+ "loss": 1.9698,
+ "step": 53
+ },
+ {
+ "epoch": 0.054015191772686065,
+ "grad_norm": 0.7697799205780029,
+ "learning_rate": 0.00019983983492623833,
+ "loss": 2.0609,
+ "step": 54
+ },
+ {
+ "epoch": 0.05501547310180988,
+ "grad_norm": 0.8806005716323853,
+ "learning_rate": 0.00019983384961861096,
+ "loss": 1.9756,
+ "step": 55
+ },
+ {
+ "epoch": 0.0560157544309337,
+ "grad_norm": 0.9424449801445007,
+ "learning_rate": 0.0001998277546116378,
+ "loss": 2.0913,
+ "step": 56
+ },
+ {
+ "epoch": 0.05701603576005752,
+ "grad_norm": 1.139495849609375,
+ "learning_rate": 0.00019982154991201608,
+ "loss": 2.2524,
+ "step": 57
+ },
+ {
+ "epoch": 0.058016317089181334,
+ "grad_norm": 1.094347357749939,
+ "learning_rate": 0.00019981523552656377,
+ "loss": 1.8501,
+ "step": 58
+ },
+ {
+ "epoch": 0.05901659841830515,
+ "grad_norm": 1.1519278287887573,
+ "learning_rate": 0.00019980881146221914,
+ "loss": 1.9866,
+ "step": 59
+ },
+ {
+ "epoch": 0.06001687974742896,
+ "grad_norm": 1.2018250226974487,
+ "learning_rate": 0.00019980227772604112,
+ "loss": 1.8226,
+ "step": 60
+ },
+ {
+ "epoch": 0.06101716107655278,
+ "grad_norm": 0.9565753936767578,
+ "learning_rate": 0.0001997956343252091,
+ "loss": 1.8434,
+ "step": 61
+ },
+ {
+ "epoch": 0.062017442405676595,
+ "grad_norm": 1.0832768678665161,
+ "learning_rate": 0.00019978888126702296,
+ "loss": 2.1271,
+ "step": 62
+ },
+ {
+ "epoch": 0.06301772373480041,
+ "grad_norm": 0.8973837494850159,
+ "learning_rate": 0.00019978201855890308,
+ "loss": 1.8331,
+ "step": 63
+ },
+ {
+ "epoch": 0.06401800506392423,
+ "grad_norm": 0.8754604458808899,
+ "learning_rate": 0.00019977504620839035,
+ "loss": 2.1379,
+ "step": 64
+ },
+ {
+ "epoch": 0.06501828639304805,
+ "grad_norm": 0.8244839310646057,
+ "learning_rate": 0.00019976796422314615,
+ "loss": 1.8431,
+ "step": 65
+ },
+ {
+ "epoch": 0.06601856772217186,
+ "grad_norm": 0.8213551044464111,
+ "learning_rate": 0.00019976077261095226,
+ "loss": 1.9155,
+ "step": 66
+ },
+ {
+ "epoch": 0.06701884905129568,
+ "grad_norm": 0.9140985608100891,
+ "learning_rate": 0.00019975347137971098,
+ "loss": 2.0651,
+ "step": 67
+ },
+ {
+ "epoch": 0.0680191303804195,
+ "grad_norm": 0.8518921732902527,
+ "learning_rate": 0.00019974606053744503,
+ "loss": 1.8197,
+ "step": 68
+ },
+ {
+ "epoch": 0.06901941170954332,
+ "grad_norm": 0.8397145867347717,
+ "learning_rate": 0.00019973854009229763,
+ "loss": 1.8621,
+ "step": 69
+ },
+ {
+ "epoch": 0.07001969303866712,
+ "grad_norm": 0.8727964162826538,
+ "learning_rate": 0.00019973091005253232,
+ "loss": 1.762,
+ "step": 70
+ },
+ {
+ "epoch": 0.07101997436779094,
+ "grad_norm": 0.9284623265266418,
+ "learning_rate": 0.0001997231704265332,
+ "loss": 1.8675,
+ "step": 71
+ },
+ {
+ "epoch": 0.07202025569691475,
+ "grad_norm": 0.8280015587806702,
+ "learning_rate": 0.00019971532122280464,
+ "loss": 1.931,
+ "step": 72
+ },
+ {
+ "epoch": 0.07302053702603857,
+ "grad_norm": 0.7591394186019897,
+ "learning_rate": 0.0001997073624499716,
+ "loss": 1.8485,
+ "step": 73
+ },
+ {
+ "epoch": 0.07402081835516239,
+ "grad_norm": 0.975128710269928,
+ "learning_rate": 0.0001996992941167792,
+ "loss": 2.0784,
+ "step": 74
+ },
+ {
+ "epoch": 0.0750210996842862,
+ "grad_norm": 0.8034948110580444,
+ "learning_rate": 0.00019969111623209323,
+ "loss": 1.9849,
+ "step": 75
+ },
+ {
+ "epoch": 0.07602138101341002,
+ "grad_norm": 0.8540483713150024,
+ "learning_rate": 0.00019968282880489957,
+ "loss": 1.7832,
+ "step": 76
+ },
+ {
+ "epoch": 0.07702166234253384,
+ "grad_norm": 0.8181695342063904,
+ "learning_rate": 0.00019967443184430467,
+ "loss": 1.944,
+ "step": 77
+ },
+ {
+ "epoch": 0.07802194367165766,
+ "grad_norm": 0.8446747064590454,
+ "learning_rate": 0.0001996659253595353,
+ "loss": 1.8508,
+ "step": 78
+ },
+ {
+ "epoch": 0.07902222500078147,
+ "grad_norm": 0.8280364871025085,
+ "learning_rate": 0.0001996573093599385,
+ "loss": 1.843,
+ "step": 79
+ },
+ {
+ "epoch": 0.08002250632990529,
+ "grad_norm": 0.8016006350517273,
+ "learning_rate": 0.00019964858385498172,
+ "loss": 1.9368,
+ "step": 80
+ },
+ {
+ "epoch": 0.08102278765902911,
+ "grad_norm": 0.8450536131858826,
+ "learning_rate": 0.00019963974885425266,
+ "loss": 1.9736,
+ "step": 81
+ },
+ {
+ "epoch": 0.08202306898815291,
+ "grad_norm": 0.9172171950340271,
+ "learning_rate": 0.00019963080436745945,
+ "loss": 1.9382,
+ "step": 82
+ },
+ {
+ "epoch": 0.08302335031727673,
+ "grad_norm": 0.8581916689872742,
+ "learning_rate": 0.00019962175040443044,
+ "loss": 2.224,
+ "step": 83
+ },
+ {
+ "epoch": 0.08402363164640055,
+ "grad_norm": 0.9350367188453674,
+ "learning_rate": 0.0001996125869751143,
+ "loss": 2.0519,
+ "step": 84
+ },
+ {
+ "epoch": 0.08502391297552436,
+ "grad_norm": 0.9276247620582581,
+ "learning_rate": 0.00019960331408957997,
+ "loss": 1.9657,
+ "step": 85
+ },
+ {
+ "epoch": 0.08602419430464818,
+ "grad_norm": 0.871574342250824,
+ "learning_rate": 0.00019959393175801671,
+ "loss": 1.9399,
+ "step": 86
+ },
+ {
+ "epoch": 0.087024475633772,
+ "grad_norm": 1.0662888288497925,
+ "learning_rate": 0.00019958443999073397,
+ "loss": 1.9089,
+ "step": 87
+ },
+ {
+ "epoch": 0.08802475696289581,
+ "grad_norm": 0.8258713483810425,
+ "learning_rate": 0.00019957483879816151,
+ "loss": 1.839,
+ "step": 88
+ },
+ {
+ "epoch": 0.08902503829201963,
+ "grad_norm": 0.8154664039611816,
+ "learning_rate": 0.00019956512819084928,
+ "loss": 1.8409,
+ "step": 89
+ },
+ {
+ "epoch": 0.09002531962114345,
+ "grad_norm": 0.8584638833999634,
+ "learning_rate": 0.00019955530817946748,
+ "loss": 1.9521,
+ "step": 90
+ },
+ {
+ "epoch": 0.09102560095026727,
+ "grad_norm": 0.7917523384094238,
+ "learning_rate": 0.00019954537877480655,
+ "loss": 1.9495,
+ "step": 91
+ },
+ {
+ "epoch": 0.09202588227939108,
+ "grad_norm": 1.0129039287567139,
+ "learning_rate": 0.00019953533998777706,
+ "loss": 1.949,
+ "step": 92
+ },
+ {
+ "epoch": 0.0930261636085149,
+ "grad_norm": 0.8677986264228821,
+ "learning_rate": 0.00019952519182940993,
+ "loss": 1.7875,
+ "step": 93
+ },
+ {
+ "epoch": 0.09402644493763872,
+ "grad_norm": 0.8848614692687988,
+ "learning_rate": 0.00019951493431085603,
+ "loss": 2.0675,
+ "step": 94
+ },
+ {
+ "epoch": 0.09502672626676252,
+ "grad_norm": 0.9936463832855225,
+ "learning_rate": 0.00019950456744338658,
+ "loss": 1.6761,
+ "step": 95
+ },
+ {
+ "epoch": 0.09602700759588634,
+ "grad_norm": 1.0520148277282715,
+ "learning_rate": 0.00019949409123839288,
+ "loss": 2.081,
+ "step": 96
+ },
+ {
+ "epoch": 0.09702728892501016,
+ "grad_norm": 0.8061773180961609,
+ "learning_rate": 0.00019948350570738642,
+ "loss": 1.7281,
+ "step": 97
+ },
+ {
+ "epoch": 0.09802757025413397,
+ "grad_norm": 0.7642756104469299,
+ "learning_rate": 0.0001994728108619987,
+ "loss": 2.0032,
+ "step": 98
+ },
+ {
+ "epoch": 0.09902785158325779,
+ "grad_norm": 0.8541550040245056,
+ "learning_rate": 0.0001994620067139815,
+ "loss": 2.1136,
+ "step": 99
+ },
+ {
+ "epoch": 0.1000281329123816,
+ "grad_norm": 0.7868679761886597,
+ "learning_rate": 0.00019945109327520658,
+ "loss": 1.8695,
+ "step": 100
+ },
+ {
+ "epoch": 0.10102841424150542,
+ "grad_norm": 0.8776901364326477,
+ "learning_rate": 0.00019944007055766586,
+ "loss": 1.9786,
+ "step": 101
+ },
+ {
+ "epoch": 0.10202869557062924,
+ "grad_norm": 0.9013833999633789,
+ "learning_rate": 0.00019942893857347128,
+ "loss": 2.1466,
+ "step": 102
+ },
+ {
+ "epoch": 0.10302897689975306,
+ "grad_norm": 0.957558274269104,
+ "learning_rate": 0.00019941769733485494,
+ "loss": 2.0473,
+ "step": 103
+ },
+ {
+ "epoch": 0.10402925822887688,
+ "grad_norm": 0.8921108841896057,
+ "learning_rate": 0.00019940634685416888,
+ "loss": 1.7882,
+ "step": 104
+ },
+ {
+ "epoch": 0.10502953955800069,
+ "grad_norm": 0.896019697189331,
+ "learning_rate": 0.00019939488714388524,
+ "loss": 1.8811,
+ "step": 105
+ },
+ {
+ "epoch": 0.10602982088712451,
+ "grad_norm": 0.8792067766189575,
+ "learning_rate": 0.00019938331821659614,
+ "loss": 1.8624,
+ "step": 106
+ },
+ {
+ "epoch": 0.10703010221624833,
+ "grad_norm": 0.8739930391311646,
+ "learning_rate": 0.0001993716400850138,
+ "loss": 1.8105,
+ "step": 107
+ },
+ {
+ "epoch": 0.10803038354537213,
+ "grad_norm": 0.7678424715995789,
+ "learning_rate": 0.0001993598527619703,
+ "loss": 1.8772,
+ "step": 108
+ },
+ {
+ "epoch": 0.10903066487449595,
+ "grad_norm": 0.8718745112419128,
+ "learning_rate": 0.00019934795626041783,
+ "loss": 1.8236,
+ "step": 109
+ },
+ {
+ "epoch": 0.11003094620361976,
+ "grad_norm": 0.8467247486114502,
+ "learning_rate": 0.0001993359505934285,
+ "loss": 1.8188,
+ "step": 110
+ },
+ {
+ "epoch": 0.11103122753274358,
+ "grad_norm": 0.8685783743858337,
+ "learning_rate": 0.00019932383577419432,
+ "loss": 2.0775,
+ "step": 111
+ },
+ {
+ "epoch": 0.1120315088618674,
+ "grad_norm": 0.7799698710441589,
+ "learning_rate": 0.0001993116118160273,
+ "loss": 1.6489,
+ "step": 112
+ },
+ {
+ "epoch": 0.11303179019099122,
+ "grad_norm": 0.7900094389915466,
+ "learning_rate": 0.00019929927873235938,
+ "loss": 1.8332,
+ "step": 113
+ },
+ {
+ "epoch": 0.11403207152011503,
+ "grad_norm": 0.9433258771896362,
+ "learning_rate": 0.00019928683653674237,
+ "loss": 1.9331,
+ "step": 114
+ },
+ {
+ "epoch": 0.11503235284923885,
+ "grad_norm": 0.8861056566238403,
+ "learning_rate": 0.00019927428524284805,
+ "loss": 1.9135,
+ "step": 115
+ },
+ {
+ "epoch": 0.11603263417836267,
+ "grad_norm": 0.8566756844520569,
+ "learning_rate": 0.00019926162486446792,
+ "loss": 1.9874,
+ "step": 116
+ },
+ {
+ "epoch": 0.11703291550748648,
+ "grad_norm": 0.6897929310798645,
+ "learning_rate": 0.0001992488554155135,
+ "loss": 1.946,
+ "step": 117
+ },
+ {
+ "epoch": 0.1180331968366103,
+ "grad_norm": 0.7807729244232178,
+ "learning_rate": 0.00019923597691001615,
+ "loss": 1.8127,
+ "step": 118
+ },
+ {
+ "epoch": 0.11903347816573412,
+ "grad_norm": 0.7572523355484009,
+ "learning_rate": 0.0001992229893621269,
+ "loss": 1.7768,
+ "step": 119
+ },
+ {
+ "epoch": 0.12003375949485792,
+ "grad_norm": 0.7393172979354858,
+ "learning_rate": 0.00019920989278611687,
+ "loss": 1.894,
+ "step": 120
+ },
+ {
+ "epoch": 0.12103404082398174,
+ "grad_norm": 0.866576611995697,
+ "learning_rate": 0.0001991966871963767,
+ "loss": 1.9285,
+ "step": 121
+ },
+ {
+ "epoch": 0.12203432215310556,
+ "grad_norm": 0.7326533794403076,
+ "learning_rate": 0.000199183372607417,
+ "loss": 1.9309,
+ "step": 122
+ },
+ {
+ "epoch": 0.12303460348222937,
+ "grad_norm": 0.7655537724494934,
+ "learning_rate": 0.0001991699490338681,
+ "loss": 2.1145,
+ "step": 123
+ },
+ {
+ "epoch": 0.12403488481135319,
+ "grad_norm": 0.9714633226394653,
+ "learning_rate": 0.00019915641649048005,
+ "loss": 2.0341,
+ "step": 124
+ },
+ {
+ "epoch": 0.12503516614047702,
+ "grad_norm": 0.8542420864105225,
+ "learning_rate": 0.0001991427749921227,
+ "loss": 2.1426,
+ "step": 125
+ },
+ {
+ "epoch": 0.12603544746960083,
+ "grad_norm": 0.8286274671554565,
+ "learning_rate": 0.00019912902455378556,
+ "loss": 1.8452,
+ "step": 126
+ },
+ {
+ "epoch": 0.12703572879872463,
+ "grad_norm": 0.8823768496513367,
+ "learning_rate": 0.00019911516519057788,
+ "loss": 1.8651,
+ "step": 127
+ },
+ {
+ "epoch": 0.12803601012784846,
+ "grad_norm": 0.7301567196846008,
+ "learning_rate": 0.00019910119691772863,
+ "loss": 1.7776,
+ "step": 128
+ },
+ {
+ "epoch": 0.12903629145697226,
+ "grad_norm": 0.8402552604675293,
+ "learning_rate": 0.00019908711975058637,
+ "loss": 1.8617,
+ "step": 129
+ },
+ {
+ "epoch": 0.1300365727860961,
+ "grad_norm": 0.814500093460083,
+ "learning_rate": 0.0001990729337046194,
+ "loss": 1.9156,
+ "step": 130
+ },
+ {
+ "epoch": 0.1310368541152199,
+ "grad_norm": 0.8262699246406555,
+ "learning_rate": 0.0001990586387954156,
+ "loss": 1.8659,
+ "step": 131
+ },
+ {
+ "epoch": 0.13203713544434373,
+ "grad_norm": 0.8846324682235718,
+ "learning_rate": 0.00019904423503868247,
+ "loss": 2.043,
+ "step": 132
+ },
+ {
+ "epoch": 0.13303741677346753,
+ "grad_norm": 0.8757227659225464,
+ "learning_rate": 0.00019902972245024715,
+ "loss": 1.9217,
+ "step": 133
+ },
+ {
+ "epoch": 0.13403769810259136,
+ "grad_norm": 0.8476879596710205,
+ "learning_rate": 0.00019901510104605637,
+ "loss": 1.8892,
+ "step": 134
+ },
+ {
+ "epoch": 0.13503797943171517,
+ "grad_norm": 0.7707583904266357,
+ "learning_rate": 0.00019900037084217637,
+ "loss": 1.787,
+ "step": 135
+ },
+ {
+ "epoch": 0.136038260760839,
+ "grad_norm": 0.7389562129974365,
+ "learning_rate": 0.00019898553185479303,
+ "loss": 1.5854,
+ "step": 136
+ },
+ {
+ "epoch": 0.1370385420899628,
+ "grad_norm": 0.7331375479698181,
+ "learning_rate": 0.00019897058410021167,
+ "loss": 1.997,
+ "step": 137
+ },
+ {
+ "epoch": 0.13803882341908663,
+ "grad_norm": 0.7219388484954834,
+ "learning_rate": 0.00019895552759485722,
+ "loss": 1.8337,
+ "step": 138
+ },
+ {
+ "epoch": 0.13903910474821043,
+ "grad_norm": 0.8535702228546143,
+ "learning_rate": 0.00019894036235527395,
+ "loss": 1.7818,
+ "step": 139
+ },
+ {
+ "epoch": 0.14003938607733424,
+ "grad_norm": 0.7627841830253601,
+ "learning_rate": 0.00019892508839812584,
+ "loss": 1.8173,
+ "step": 140
+ },
+ {
+ "epoch": 0.14103966740645807,
+ "grad_norm": 0.8397619724273682,
+ "learning_rate": 0.00019890970574019617,
+ "loss": 1.8735,
+ "step": 141
+ },
+ {
+ "epoch": 0.14203994873558187,
+ "grad_norm": 0.8093482851982117,
+ "learning_rate": 0.00019889421439838763,
+ "loss": 1.9918,
+ "step": 142
+ },
+ {
+ "epoch": 0.1430402300647057,
+ "grad_norm": 0.8853684067726135,
+ "learning_rate": 0.00019887861438972246,
+ "loss": 1.5825,
+ "step": 143
+ },
+ {
+ "epoch": 0.1440405113938295,
+ "grad_norm": 0.7413788437843323,
+ "learning_rate": 0.00019886290573134228,
+ "loss": 1.9068,
+ "step": 144
+ },
+ {
+ "epoch": 0.14504079272295334,
+ "grad_norm": 0.7924477458000183,
+ "learning_rate": 0.000198847088440508,
+ "loss": 1.8582,
+ "step": 145
+ },
+ {
+ "epoch": 0.14604107405207714,
+ "grad_norm": 0.8679131865501404,
+ "learning_rate": 0.0001988311625346,
+ "loss": 1.7104,
+ "step": 146
+ },
+ {
+ "epoch": 0.14704135538120097,
+ "grad_norm": 0.7480150461196899,
+ "learning_rate": 0.00019881512803111796,
+ "loss": 1.7288,
+ "step": 147
+ },
+ {
+ "epoch": 0.14804163671032478,
+ "grad_norm": 0.8382390737533569,
+ "learning_rate": 0.00019879898494768093,
+ "loss": 1.8004,
+ "step": 148
+ },
+ {
+ "epoch": 0.1490419180394486,
+ "grad_norm": 0.7360037565231323,
+ "learning_rate": 0.00019878273330202717,
+ "loss": 1.85,
+ "step": 149
+ },
+ {
+ "epoch": 0.1500421993685724,
+ "grad_norm": 0.9644019603729248,
+ "learning_rate": 0.00019876637311201433,
+ "loss": 2.065,
+ "step": 150
+ },
+ {
+ "epoch": 0.15104248069769624,
+ "grad_norm": 0.8116248250007629,
+ "learning_rate": 0.00019874990439561934,
+ "loss": 1.702,
+ "step": 151
+ },
+ {
+ "epoch": 0.15204276202682004,
+ "grad_norm": 0.9301722645759583,
+ "learning_rate": 0.0001987333271709383,
+ "loss": 1.8089,
+ "step": 152
+ },
+ {
+ "epoch": 0.15304304335594385,
+ "grad_norm": 0.7991555333137512,
+ "learning_rate": 0.00019871664145618657,
+ "loss": 1.8227,
+ "step": 153
+ },
+ {
+ "epoch": 0.15404332468506768,
+ "grad_norm": 0.8676092028617859,
+ "learning_rate": 0.00019869984726969878,
+ "loss": 1.7253,
+ "step": 154
+ },
+ {
+ "epoch": 0.15504360601419148,
+ "grad_norm": 0.8022972941398621,
+ "learning_rate": 0.00019868294462992866,
+ "loss": 1.8766,
+ "step": 155
+ },
+ {
+ "epoch": 0.1560438873433153,
+ "grad_norm": 1.128886103630066,
+ "learning_rate": 0.00019866593355544922,
+ "loss": 2.0197,
+ "step": 156
+ },
+ {
+ "epoch": 0.15704416867243912,
+ "grad_norm": 0.7420483827590942,
+ "learning_rate": 0.00019864881406495246,
+ "loss": 1.8825,
+ "step": 157
+ },
+ {
+ "epoch": 0.15804445000156295,
+ "grad_norm": 0.7797536849975586,
+ "learning_rate": 0.00019863158617724967,
+ "loss": 1.8892,
+ "step": 158
+ },
+ {
+ "epoch": 0.15904473133068675,
+ "grad_norm": 0.6859965324401855,
+ "learning_rate": 0.00019861424991127115,
+ "loss": 1.8424,
+ "step": 159
+ },
+ {
+ "epoch": 0.16004501265981058,
+ "grad_norm": 0.8115108609199524,
+ "learning_rate": 0.00019859680528606637,
+ "loss": 1.8394,
+ "step": 160
+ },
+ {
+ "epoch": 0.16104529398893438,
+ "grad_norm": 0.9756322503089905,
+ "learning_rate": 0.00019857925232080373,
+ "loss": 1.726,
+ "step": 161
+ },
+ {
+ "epoch": 0.16204557531805822,
+ "grad_norm": 0.8894350528717041,
+ "learning_rate": 0.00019856159103477086,
+ "loss": 1.8893,
+ "step": 162
+ },
+ {
+ "epoch": 0.16304585664718202,
+ "grad_norm": 0.8075819611549377,
+ "learning_rate": 0.00019854382144737426,
+ "loss": 1.6596,
+ "step": 163
+ },
+ {
+ "epoch": 0.16404613797630582,
+ "grad_norm": 0.8861923813819885,
+ "learning_rate": 0.00019852594357813952,
+ "loss": 1.9352,
+ "step": 164
+ },
+ {
+ "epoch": 0.16504641930542965,
+ "grad_norm": 0.8511936068534851,
+ "learning_rate": 0.00019850795744671116,
+ "loss": 1.9416,
+ "step": 165
+ },
+ {
+ "epoch": 0.16604670063455346,
+ "grad_norm": 0.9425658583641052,
+ "learning_rate": 0.0001984898630728527,
+ "loss": 1.9081,
+ "step": 166
+ },
+ {
+ "epoch": 0.1670469819636773,
+ "grad_norm": 0.7502055168151855,
+ "learning_rate": 0.0001984716604764466,
+ "loss": 1.703,
+ "step": 167
+ },
+ {
+ "epoch": 0.1680472632928011,
+ "grad_norm": 0.9135978817939758,
+ "learning_rate": 0.0001984533496774942,
+ "loss": 1.7641,
+ "step": 168
+ },
+ {
+ "epoch": 0.16904754462192492,
+ "grad_norm": 0.7768126726150513,
+ "learning_rate": 0.0001984349306961158,
+ "loss": 1.7053,
+ "step": 169
+ },
+ {
+ "epoch": 0.17004782595104873,
+ "grad_norm": 0.8106538653373718,
+ "learning_rate": 0.00019841640355255043,
+ "loss": 1.8646,
+ "step": 170
+ },
+ {
+ "epoch": 0.17104810728017256,
+ "grad_norm": 0.7872330546379089,
+ "learning_rate": 0.00019839776826715614,
+ "loss": 1.7814,
+ "step": 171
+ },
+ {
+ "epoch": 0.17204838860929636,
+ "grad_norm": 0.869532585144043,
+ "learning_rate": 0.00019837902486040978,
+ "loss": 1.7812,
+ "step": 172
+ },
+ {
+ "epoch": 0.1730486699384202,
+ "grad_norm": 1.015028715133667,
+ "learning_rate": 0.0001983601733529069,
+ "loss": 1.9432,
+ "step": 173
+ },
+ {
+ "epoch": 0.174048951267544,
+ "grad_norm": 0.800183117389679,
+ "learning_rate": 0.00019834121376536187,
+ "loss": 1.758,
+ "step": 174
+ },
+ {
+ "epoch": 0.17504923259666783,
+ "grad_norm": 0.7427104711532593,
+ "learning_rate": 0.00019832214611860793,
+ "loss": 1.6476,
+ "step": 175
+ },
+ {
+ "epoch": 0.17604951392579163,
+ "grad_norm": 0.8289130926132202,
+ "learning_rate": 0.00019830297043359692,
+ "loss": 1.7702,
+ "step": 176
+ },
+ {
+ "epoch": 0.17704979525491543,
+ "grad_norm": 0.8298771977424622,
+ "learning_rate": 0.00019828368673139947,
+ "loss": 1.7515,
+ "step": 177
+ },
+ {
+ "epoch": 0.17805007658403926,
+ "grad_norm": 0.7602815628051758,
+ "learning_rate": 0.0001982642950332049,
+ "loss": 1.7545,
+ "step": 178
+ },
+ {
+ "epoch": 0.17905035791316307,
+ "grad_norm": 0.8110321164131165,
+ "learning_rate": 0.00019824479536032112,
+ "loss": 2.2604,
+ "step": 179
+ },
+ {
+ "epoch": 0.1800506392422869,
+ "grad_norm": 0.882273256778717,
+ "learning_rate": 0.0001982251877341748,
+ "loss": 1.8133,
+ "step": 180
+ },
+ {
+ "epoch": 0.1810509205714107,
+ "grad_norm": 0.9015639424324036,
+ "learning_rate": 0.00019820547217631117,
+ "loss": 1.7282,
+ "step": 181
+ },
+ {
+ "epoch": 0.18205120190053453,
+ "grad_norm": 0.9231659173965454,
+ "learning_rate": 0.00019818564870839405,
+ "loss": 1.9094,
+ "step": 182
+ },
+ {
+ "epoch": 0.18305148322965833,
+ "grad_norm": 0.8110967874526978,
+ "learning_rate": 0.00019816571735220583,
+ "loss": 1.886,
+ "step": 183
+ },
+ {
+ "epoch": 0.18405176455878217,
+ "grad_norm": 0.7670036554336548,
+ "learning_rate": 0.00019814567812964748,
+ "loss": 1.9895,
+ "step": 184
+ },
+ {
+ "epoch": 0.18505204588790597,
+ "grad_norm": 0.7955975532531738,
+ "learning_rate": 0.00019812553106273847,
+ "loss": 1.8127,
+ "step": 185
+ },
+ {
+ "epoch": 0.1860523272170298,
+ "grad_norm": 0.8790062665939331,
+ "learning_rate": 0.00019810527617361681,
+ "loss": 1.8899,
+ "step": 186
+ },
+ {
+ "epoch": 0.1870526085461536,
+ "grad_norm": 0.8818586468696594,
+ "learning_rate": 0.00019808491348453894,
+ "loss": 1.7707,
+ "step": 187
+ },
+ {
+ "epoch": 0.18805288987527743,
+ "grad_norm": 0.746442437171936,
+ "learning_rate": 0.00019806444301787978,
+ "loss": 1.7281,
+ "step": 188
+ },
+ {
+ "epoch": 0.18905317120440124,
+ "grad_norm": 0.7786905169487,
+ "learning_rate": 0.0001980438647961327,
+ "loss": 1.7317,
+ "step": 189
+ },
+ {
+ "epoch": 0.19005345253352504,
+ "grad_norm": 0.9338862299919128,
+ "learning_rate": 0.00019802317884190935,
+ "loss": 1.9548,
+ "step": 190
+ },
+ {
+ "epoch": 0.19105373386264887,
+ "grad_norm": 0.7416581511497498,
+ "learning_rate": 0.00019800238517793996,
+ "loss": 1.8601,
+ "step": 191
+ },
+ {
+ "epoch": 0.19205401519177268,
+ "grad_norm": 0.6782898902893066,
+ "learning_rate": 0.00019798148382707296,
+ "loss": 1.8477,
+ "step": 192
+ },
+ {
+ "epoch": 0.1930542965208965,
+ "grad_norm": 0.7389237880706787,
+ "learning_rate": 0.00019796047481227515,
+ "loss": 1.7749,
+ "step": 193
+ },
+ {
+ "epoch": 0.1940545778500203,
+ "grad_norm": 0.9711095094680786,
+ "learning_rate": 0.00019793935815663163,
+ "loss": 2.0899,
+ "step": 194
+ },
+ {
+ "epoch": 0.19505485917914414,
+ "grad_norm": 0.7949391007423401,
+ "learning_rate": 0.00019791813388334581,
+ "loss": 1.8778,
+ "step": 195
+ },
+ {
+ "epoch": 0.19605514050826794,
+ "grad_norm": 0.8871057033538818,
+ "learning_rate": 0.00019789680201573933,
+ "loss": 1.7511,
+ "step": 196
+ },
+ {
+ "epoch": 0.19705542183739178,
+ "grad_norm": 0.8664624094963074,
+ "learning_rate": 0.00019787536257725202,
+ "loss": 1.7232,
+ "step": 197
+ },
+ {
+ "epoch": 0.19805570316651558,
+ "grad_norm": 0.871658980846405,
+ "learning_rate": 0.00019785381559144196,
+ "loss": 1.7987,
+ "step": 198
+ },
+ {
+ "epoch": 0.1990559844956394,
+ "grad_norm": 0.7748361229896545,
+ "learning_rate": 0.00019783216108198542,
+ "loss": 1.9239,
+ "step": 199
+ },
+ {
+ "epoch": 0.2000562658247632,
+ "grad_norm": 0.9393408298492432,
+ "learning_rate": 0.00019781039907267677,
+ "loss": 2.0936,
+ "step": 200
+ },
+ {
+ "epoch": 0.20105654715388704,
+ "grad_norm": 0.8519601225852966,
+ "learning_rate": 0.00019778852958742853,
+ "loss": 1.9108,
+ "step": 201
+ },
+ {
+ "epoch": 0.20205682848301085,
+ "grad_norm": 0.8464863300323486,
+ "learning_rate": 0.00019776655265027127,
+ "loss": 1.897,
+ "step": 202
+ },
+ {
+ "epoch": 0.20305710981213465,
+ "grad_norm": 0.8933351635932922,
+ "learning_rate": 0.00019774446828535371,
+ "loss": 1.8204,
+ "step": 203
+ },
+ {
+ "epoch": 0.20405739114125848,
+ "grad_norm": 0.8305785059928894,
+ "learning_rate": 0.00019772227651694256,
+ "loss": 1.9135,
+ "step": 204
+ },
+ {
+ "epoch": 0.20505767247038229,
+ "grad_norm": 0.8117037415504456,
+ "learning_rate": 0.00019769997736942258,
+ "loss": 1.7585,
+ "step": 205
+ },
+ {
+ "epoch": 0.20605795379950612,
+ "grad_norm": 0.7570348381996155,
+ "learning_rate": 0.00019767757086729647,
+ "loss": 1.8373,
+ "step": 206
+ },
+ {
+ "epoch": 0.20705823512862992,
+ "grad_norm": 0.9291234016418457,
+ "learning_rate": 0.00019765505703518496,
+ "loss": 1.7774,
+ "step": 207
+ },
+ {
+ "epoch": 0.20805851645775375,
+ "grad_norm": 0.8211004137992859,
+ "learning_rate": 0.00019763243589782662,
+ "loss": 1.8766,
+ "step": 208
+ },
+ {
+ "epoch": 0.20905879778687755,
+ "grad_norm": 0.6625431180000305,
+ "learning_rate": 0.00019760970748007803,
+ "loss": 1.628,
+ "step": 209
+ },
+ {
+ "epoch": 0.21005907911600138,
+ "grad_norm": 0.7974782586097717,
+ "learning_rate": 0.0001975868718069136,
+ "loss": 1.6896,
+ "step": 210
+ },
+ {
+ "epoch": 0.2110593604451252,
+ "grad_norm": 0.8364912867546082,
+ "learning_rate": 0.00019756392890342563,
+ "loss": 1.7492,
+ "step": 211
+ },
+ {
+ "epoch": 0.21205964177424902,
+ "grad_norm": 0.8730652332305908,
+ "learning_rate": 0.00019754087879482422,
+ "loss": 1.8295,
+ "step": 212
+ },
+ {
+ "epoch": 0.21305992310337282,
+ "grad_norm": 0.7532863020896912,
+ "learning_rate": 0.00019751772150643722,
+ "loss": 1.8309,
+ "step": 213
+ },
+ {
+ "epoch": 0.21406020443249665,
+ "grad_norm": 0.7375178933143616,
+ "learning_rate": 0.00019749445706371038,
+ "loss": 1.7854,
+ "step": 214
+ },
+ {
+ "epoch": 0.21506048576162046,
+ "grad_norm": 0.7524377703666687,
+ "learning_rate": 0.00019747108549220702,
+ "loss": 1.7683,
+ "step": 215
+ },
+ {
+ "epoch": 0.21606076709074426,
+ "grad_norm": 0.7331809997558594,
+ "learning_rate": 0.00019744760681760832,
+ "loss": 1.7103,
+ "step": 216
+ },
+ {
+ "epoch": 0.2170610484198681,
+ "grad_norm": 0.8083691596984863,
+ "learning_rate": 0.00019742402106571314,
+ "loss": 1.674,
+ "step": 217
+ },
+ {
+ "epoch": 0.2180613297489919,
+ "grad_norm": 0.8524570465087891,
+ "learning_rate": 0.00019740032826243788,
+ "loss": 1.7227,
+ "step": 218
+ },
+ {
+ "epoch": 0.21906161107811573,
+ "grad_norm": 0.7676658630371094,
+ "learning_rate": 0.0001973765284338167,
+ "loss": 1.8561,
+ "step": 219
+ },
+ {
+ "epoch": 0.22006189240723953,
+ "grad_norm": 0.7858710289001465,
+ "learning_rate": 0.00019735262160600127,
+ "loss": 1.7796,
+ "step": 220
+ },
+ {
+ "epoch": 0.22106217373636336,
+ "grad_norm": 0.7587497234344482,
+ "learning_rate": 0.00019732860780526088,
+ "loss": 1.9271,
+ "step": 221
+ },
+ {
+ "epoch": 0.22206245506548716,
+ "grad_norm": 0.8084688186645508,
+ "learning_rate": 0.00019730448705798239,
+ "loss": 1.8176,
+ "step": 222
+ },
+ {
+ "epoch": 0.223062736394611,
+ "grad_norm": 0.6736906170845032,
+ "learning_rate": 0.00019728025939067008,
+ "loss": 1.6288,
+ "step": 223
+ },
+ {
+ "epoch": 0.2240630177237348,
+ "grad_norm": 0.7483925819396973,
+ "learning_rate": 0.00019725592482994583,
+ "loss": 1.8363,
+ "step": 224
+ },
+ {
+ "epoch": 0.22506329905285863,
+ "grad_norm": 1.7995796203613281,
+ "learning_rate": 0.00019723148340254892,
+ "loss": 1.9072,
+ "step": 225
+ },
+ {
+ "epoch": 0.22606358038198243,
+ "grad_norm": 0.8028881549835205,
+ "learning_rate": 0.00019720693513533598,
+ "loss": 1.9021,
+ "step": 226
+ },
+ {
+ "epoch": 0.22706386171110624,
+ "grad_norm": 0.9853909015655518,
+ "learning_rate": 0.00019718228005528122,
+ "loss": 2.0159,
+ "step": 227
+ },
+ {
+ "epoch": 0.22806414304023007,
+ "grad_norm": 0.7784947156906128,
+ "learning_rate": 0.00019715751818947603,
+ "loss": 1.7816,
+ "step": 228
+ },
+ {
+ "epoch": 0.22906442436935387,
+ "grad_norm": 0.7447614669799805,
+ "learning_rate": 0.0001971326495651293,
+ "loss": 1.654,
+ "step": 229
+ },
+ {
+ "epoch": 0.2300647056984777,
+ "grad_norm": 0.8673064112663269,
+ "learning_rate": 0.00019710767420956705,
+ "loss": 2.0049,
+ "step": 230
+ },
+ {
+ "epoch": 0.2310649870276015,
+ "grad_norm": 0.8207747936248779,
+ "learning_rate": 0.0001970825921502328,
+ "loss": 1.9388,
+ "step": 231
+ },
+ {
+ "epoch": 0.23206526835672533,
+ "grad_norm": 0.742266058921814,
+ "learning_rate": 0.0001970574034146871,
+ "loss": 1.7658,
+ "step": 232
+ },
+ {
+ "epoch": 0.23306554968584914,
+ "grad_norm": 0.9097973704338074,
+ "learning_rate": 0.00019703210803060782,
+ "loss": 1.8023,
+ "step": 233
+ },
+ {
+ "epoch": 0.23406583101497297,
+ "grad_norm": 0.7512438297271729,
+ "learning_rate": 0.00019700670602579008,
+ "loss": 1.8551,
+ "step": 234
+ },
+ {
+ "epoch": 0.23506611234409677,
+ "grad_norm": 0.8303943872451782,
+ "learning_rate": 0.00019698119742814606,
+ "loss": 1.7723,
+ "step": 235
+ },
+ {
+ "epoch": 0.2360663936732206,
+ "grad_norm": 0.9195139408111572,
+ "learning_rate": 0.00019695558226570507,
+ "loss": 1.6426,
+ "step": 236
+ },
+ {
+ "epoch": 0.2370666750023444,
+ "grad_norm": 0.7734714150428772,
+ "learning_rate": 0.00019692986056661356,
+ "loss": 1.7798,
+ "step": 237
+ },
+ {
+ "epoch": 0.23806695633146824,
+ "grad_norm": 0.8759648203849792,
+ "learning_rate": 0.00019690403235913504,
+ "loss": 1.6465,
+ "step": 238
+ },
+ {
+ "epoch": 0.23906723766059204,
+ "grad_norm": 0.7688003778457642,
+ "learning_rate": 0.00019687809767165,
+ "loss": 2.0092,
+ "step": 239
+ },
+ {
+ "epoch": 0.24006751898971584,
+ "grad_norm": 0.7398790121078491,
+ "learning_rate": 0.000196852056532656,
+ "loss": 1.8176,
+ "step": 240
+ },
+ {
+ "epoch": 0.24106780031883968,
+ "grad_norm": 0.8921257853507996,
+ "learning_rate": 0.00019682590897076752,
+ "loss": 1.7387,
+ "step": 241
+ },
+ {
+ "epoch": 0.24206808164796348,
+ "grad_norm": 0.7939002513885498,
+ "learning_rate": 0.00019679965501471608,
+ "loss": 1.9417,
+ "step": 242
+ },
+ {
+ "epoch": 0.2430683629770873,
+ "grad_norm": 0.7798025608062744,
+ "learning_rate": 0.0001967732946933499,
+ "loss": 1.7134,
+ "step": 243
+ },
+ {
+ "epoch": 0.2440686443062111,
+ "grad_norm": 0.8007254600524902,
+ "learning_rate": 0.00019674682803563428,
+ "loss": 1.7387,
+ "step": 244
+ },
+ {
+ "epoch": 0.24506892563533494,
+ "grad_norm": 0.6257696151733398,
+ "learning_rate": 0.00019672025507065131,
+ "loss": 1.767,
+ "step": 245
+ },
+ {
+ "epoch": 0.24606920696445875,
+ "grad_norm": 0.7942785620689392,
+ "learning_rate": 0.00019669357582759983,
+ "loss": 1.8801,
+ "step": 246
+ },
+ {
+ "epoch": 0.24706948829358258,
+ "grad_norm": 0.7933829426765442,
+ "learning_rate": 0.00019666679033579552,
+ "loss": 1.9711,
+ "step": 247
+ },
+ {
+ "epoch": 0.24806976962270638,
+ "grad_norm": 0.7489326596260071,
+ "learning_rate": 0.00019663989862467082,
+ "loss": 1.8038,
+ "step": 248
+ },
+ {
+ "epoch": 0.2490700509518302,
+ "grad_norm": 0.7279101014137268,
+ "learning_rate": 0.00019661290072377482,
+ "loss": 1.66,
+ "step": 249
+ },
+ {
+ "epoch": 0.25007033228095404,
+ "grad_norm": 0.6823874115943909,
+ "learning_rate": 0.00019658579666277334,
+ "loss": 1.8064,
+ "step": 250
+ },
+ {
+ "epoch": 0.2510706136100778,
+ "grad_norm": 0.6561273336410522,
+ "learning_rate": 0.0001965585864714488,
+ "loss": 1.6874,
+ "step": 251
+ },
+ {
+ "epoch": 0.25207089493920165,
+ "grad_norm": 0.6457573175430298,
+ "learning_rate": 0.00019653127017970034,
+ "loss": 1.4587,
+ "step": 252
+ },
+ {
+ "epoch": 0.2530711762683255,
+ "grad_norm": 0.7649476528167725,
+ "learning_rate": 0.0001965038478175436,
+ "loss": 1.9811,
+ "step": 253
+ },
+ {
+ "epoch": 0.25407145759744926,
+ "grad_norm": 0.8786829710006714,
+ "learning_rate": 0.00019647631941511082,
+ "loss": 1.8629,
+ "step": 254
+ },
+ {
+ "epoch": 0.2550717389265731,
+ "grad_norm": 0.7038159966468811,
+ "learning_rate": 0.0001964486850026507,
+ "loss": 1.6885,
+ "step": 255
+ },
+ {
+ "epoch": 0.2560720202556969,
+ "grad_norm": 0.7255909442901611,
+ "learning_rate": 0.00019642094461052852,
+ "loss": 1.7335,
+ "step": 256
+ },
+ {
+ "epoch": 0.25707230158482075,
+ "grad_norm": 0.7780727744102478,
+ "learning_rate": 0.00019639309826922585,
+ "loss": 1.899,
+ "step": 257
+ },
+ {
+ "epoch": 0.2580725829139445,
+ "grad_norm": 0.8533650040626526,
+ "learning_rate": 0.0001963651460093409,
+ "loss": 1.7711,
+ "step": 258
+ },
+ {
+ "epoch": 0.25907286424306836,
+ "grad_norm": 0.6440068483352661,
+ "learning_rate": 0.00019633708786158806,
+ "loss": 1.6685,
+ "step": 259
+ },
+ {
+ "epoch": 0.2600731455721922,
+ "grad_norm": 0.6873877048492432,
+ "learning_rate": 0.00019630892385679818,
+ "loss": 1.7502,
+ "step": 260
+ },
+ {
+ "epoch": 0.261073426901316,
+ "grad_norm": 0.7100672721862793,
+ "learning_rate": 0.00019628065402591845,
+ "loss": 1.7789,
+ "step": 261
+ },
+ {
+ "epoch": 0.2620737082304398,
+ "grad_norm": 0.8447420001029968,
+ "learning_rate": 0.00019625227840001225,
+ "loss": 1.8577,
+ "step": 262
+ },
+ {
+ "epoch": 0.2630739895595636,
+ "grad_norm": 0.767888605594635,
+ "learning_rate": 0.0001962237970102593,
+ "loss": 1.5936,
+ "step": 263
+ },
+ {
+ "epoch": 0.26407427088868746,
+ "grad_norm": 0.6955805420875549,
+ "learning_rate": 0.0001961952098879555,
+ "loss": 1.7733,
+ "step": 264
+ },
+ {
+ "epoch": 0.26507455221781123,
+ "grad_norm": 0.777740478515625,
+ "learning_rate": 0.00019616651706451287,
+ "loss": 1.6027,
+ "step": 265
+ },
+ {
+ "epoch": 0.26607483354693506,
+ "grad_norm": 0.7691099047660828,
+ "learning_rate": 0.0001961377185714597,
+ "loss": 1.7457,
+ "step": 266
+ },
+ {
+ "epoch": 0.2670751148760589,
+ "grad_norm": 0.6778420805931091,
+ "learning_rate": 0.0001961088144404403,
+ "loss": 1.7704,
+ "step": 267
+ },
+ {
+ "epoch": 0.2680753962051827,
+ "grad_norm": 0.7943267226219177,
+ "learning_rate": 0.00019607980470321505,
+ "loss": 1.9775,
+ "step": 268
+ },
+ {
+ "epoch": 0.2690756775343065,
+ "grad_norm": 0.6660135388374329,
+ "learning_rate": 0.00019605068939166045,
+ "loss": 1.6556,
+ "step": 269
+ },
+ {
+ "epoch": 0.27007595886343033,
+ "grad_norm": 0.8664935827255249,
+ "learning_rate": 0.00019602146853776894,
+ "loss": 2.03,
+ "step": 270
+ },
+ {
+ "epoch": 0.27107624019255416,
+ "grad_norm": 0.7783074975013733,
+ "learning_rate": 0.000195992142173649,
+ "loss": 1.7426,
+ "step": 271
+ },
+ {
+ "epoch": 0.272076521521678,
+ "grad_norm": 0.7470223903656006,
+ "learning_rate": 0.0001959627103315249,
+ "loss": 1.7284,
+ "step": 272
+ },
+ {
+ "epoch": 0.27307680285080177,
+ "grad_norm": 0.7284931540489197,
+ "learning_rate": 0.00019593317304373705,
+ "loss": 1.6977,
+ "step": 273
+ },
+ {
+ "epoch": 0.2740770841799256,
+ "grad_norm": 0.7201762795448303,
+ "learning_rate": 0.00019590353034274144,
+ "loss": 1.7184,
+ "step": 274
+ },
+ {
+ "epoch": 0.27507736550904943,
+ "grad_norm": 0.6756151914596558,
+ "learning_rate": 0.00019587378226111014,
+ "loss": 1.7276,
+ "step": 275
+ },
+ {
+ "epoch": 0.27607764683817326,
+ "grad_norm": 0.6784201860427856,
+ "learning_rate": 0.00019584392883153088,
+ "loss": 1.642,
+ "step": 276
+ },
+ {
+ "epoch": 0.27707792816729704,
+ "grad_norm": 0.7387176752090454,
+ "learning_rate": 0.00019581397008680717,
+ "loss": 1.7911,
+ "step": 277
+ },
+ {
+ "epoch": 0.27807820949642087,
+ "grad_norm": 0.9367021918296814,
+ "learning_rate": 0.00019578390605985826,
+ "loss": 2.0034,
+ "step": 278
+ },
+ {
+ "epoch": 0.2790784908255447,
+ "grad_norm": 0.803698718547821,
+ "learning_rate": 0.00019575373678371909,
+ "loss": 1.7907,
+ "step": 279
+ },
+ {
+ "epoch": 0.2800787721546685,
+ "grad_norm": 0.7324479818344116,
+ "learning_rate": 0.00019572346229154025,
+ "loss": 1.5539,
+ "step": 280
+ },
+ {
+ "epoch": 0.2810790534837923,
+ "grad_norm": 0.7107382416725159,
+ "learning_rate": 0.00019569308261658787,
+ "loss": 1.838,
+ "step": 281
+ },
+ {
+ "epoch": 0.28207933481291614,
+ "grad_norm": 0.8698626756668091,
+ "learning_rate": 0.00019566259779224378,
+ "loss": 1.7433,
+ "step": 282
+ },
+ {
+ "epoch": 0.28307961614203997,
+ "grad_norm": 0.7804028391838074,
+ "learning_rate": 0.00019563200785200526,
+ "loss": 1.7161,
+ "step": 283
+ },
+ {
+ "epoch": 0.28407989747116374,
+ "grad_norm": 0.8762909173965454,
+ "learning_rate": 0.00019560131282948516,
+ "loss": 1.8031,
+ "step": 284
+ },
+ {
+ "epoch": 0.2850801788002876,
+ "grad_norm": 0.8252436518669128,
+ "learning_rate": 0.0001955705127584117,
+ "loss": 1.6434,
+ "step": 285
+ },
+ {
+ "epoch": 0.2860804601294114,
+ "grad_norm": 0.8220797181129456,
+ "learning_rate": 0.00019553960767262863,
+ "loss": 1.8522,
+ "step": 286
+ },
+ {
+ "epoch": 0.28708074145853524,
+ "grad_norm": 0.7883003950119019,
+ "learning_rate": 0.00019550859760609503,
+ "loss": 1.8245,
+ "step": 287
+ },
+ {
+ "epoch": 0.288081022787659,
+ "grad_norm": 0.9208703637123108,
+ "learning_rate": 0.00019547748259288536,
+ "loss": 1.8877,
+ "step": 288
+ },
+ {
+ "epoch": 0.28908130411678284,
+ "grad_norm": 0.8452202677726746,
+ "learning_rate": 0.0001954462626671894,
+ "loss": 1.554,
+ "step": 289
+ },
+ {
+ "epoch": 0.2900815854459067,
+ "grad_norm": 0.82865971326828,
+ "learning_rate": 0.0001954149378633122,
+ "loss": 1.655,
+ "step": 290
+ },
+ {
+ "epoch": 0.29108186677503045,
+ "grad_norm": 0.7871205806732178,
+ "learning_rate": 0.00019538350821567404,
+ "loss": 1.621,
+ "step": 291
+ },
+ {
+ "epoch": 0.2920821481041543,
+ "grad_norm": 0.8288848996162415,
+ "learning_rate": 0.00019535197375881045,
+ "loss": 1.9277,
+ "step": 292
+ },
+ {
+ "epoch": 0.2930824294332781,
+ "grad_norm": 0.7275516986846924,
+ "learning_rate": 0.00019532033452737205,
+ "loss": 1.7949,
+ "step": 293
+ },
+ {
+ "epoch": 0.29408271076240194,
+ "grad_norm": 0.7424570322036743,
+ "learning_rate": 0.00019528859055612468,
+ "loss": 1.6407,
+ "step": 294
+ },
+ {
+ "epoch": 0.2950829920915257,
+ "grad_norm": 0.7031363248825073,
+ "learning_rate": 0.0001952567418799492,
+ "loss": 1.8793,
+ "step": 295
+ },
+ {
+ "epoch": 0.29608327342064955,
+ "grad_norm": 0.7190185189247131,
+ "learning_rate": 0.00019522478853384155,
+ "loss": 1.6759,
+ "step": 296
+ },
+ {
+ "epoch": 0.2970835547497734,
+ "grad_norm": 0.7270736694335938,
+ "learning_rate": 0.00019519273055291266,
+ "loss": 1.6351,
+ "step": 297
+ },
+ {
+ "epoch": 0.2980838360788972,
+ "grad_norm": 0.8894152641296387,
+ "learning_rate": 0.00019516056797238846,
+ "loss": 1.7908,
+ "step": 298
+ },
+ {
+ "epoch": 0.299084117408021,
+ "grad_norm": 0.9089106321334839,
+ "learning_rate": 0.00019512830082760987,
+ "loss": 1.6018,
+ "step": 299
+ },
+ {
+ "epoch": 0.3000843987371448,
+ "grad_norm": 0.8772429823875427,
+ "learning_rate": 0.00019509592915403255,
+ "loss": 1.8474,
+ "step": 300
+ },
+ {
+ "epoch": 0.30108468006626865,
+ "grad_norm": 0.8244933485984802,
+ "learning_rate": 0.00019506345298722717,
+ "loss": 1.4324,
+ "step": 301
+ },
+ {
+ "epoch": 0.3020849613953925,
+ "grad_norm": 0.7283012866973877,
+ "learning_rate": 0.00019503087236287913,
+ "loss": 1.5115,
+ "step": 302
+ },
+ {
+ "epoch": 0.30308524272451626,
+ "grad_norm": 0.7721333503723145,
+ "learning_rate": 0.00019499818731678873,
+ "loss": 1.6728,
+ "step": 303
+ },
+ {
+ "epoch": 0.3040855240536401,
+ "grad_norm": 0.7579306960105896,
+ "learning_rate": 0.00019496539788487082,
+ "loss": 1.5927,
+ "step": 304
+ },
+ {
+ "epoch": 0.3050858053827639,
+ "grad_norm": 0.9054704308509827,
+ "learning_rate": 0.0001949325041031551,
+ "loss": 1.9027,
+ "step": 305
+ },
+ {
+ "epoch": 0.3060860867118877,
+ "grad_norm": 0.7023262977600098,
+ "learning_rate": 0.0001948995060077859,
+ "loss": 1.7705,
+ "step": 306
+ },
+ {
+ "epoch": 0.3070863680410115,
+ "grad_norm": 0.7942065000534058,
+ "learning_rate": 0.0001948664036350221,
+ "loss": 1.8269,
+ "step": 307
+ },
+ {
+ "epoch": 0.30808664937013536,
+ "grad_norm": 0.9305068850517273,
+ "learning_rate": 0.00019483319702123732,
+ "loss": 1.8247,
+ "step": 308
+ },
+ {
+ "epoch": 0.3090869306992592,
+ "grad_norm": 0.814664900302887,
+ "learning_rate": 0.00019479988620291956,
+ "loss": 1.9179,
+ "step": 309
+ },
+ {
+ "epoch": 0.31008721202838296,
+ "grad_norm": 0.6418014764785767,
+ "learning_rate": 0.00019476647121667137,
+ "loss": 1.5011,
+ "step": 310
+ },
+ {
+ "epoch": 0.3110874933575068,
+ "grad_norm": 0.7911447882652283,
+ "learning_rate": 0.00019473295209920983,
+ "loss": 1.857,
+ "step": 311
+ },
+ {
+ "epoch": 0.3120877746866306,
+ "grad_norm": 0.7792949676513672,
+ "learning_rate": 0.00019469932888736632,
+ "loss": 1.7279,
+ "step": 312
+ },
+ {
+ "epoch": 0.31308805601575446,
+ "grad_norm": 0.7579171657562256,
+ "learning_rate": 0.00019466560161808674,
+ "loss": 1.6902,
+ "step": 313
+ },
+ {
+ "epoch": 0.31408833734487823,
+ "grad_norm": 0.7052372694015503,
+ "learning_rate": 0.00019463177032843124,
+ "loss": 1.7302,
+ "step": 314
+ },
+ {
+ "epoch": 0.31508861867400206,
+ "grad_norm": 0.7188624143600464,
+ "learning_rate": 0.00019459783505557424,
+ "loss": 1.7338,
+ "step": 315
+ },
+ {
+ "epoch": 0.3160889000031259,
+ "grad_norm": 0.6057978272438049,
+ "learning_rate": 0.00019456379583680452,
+ "loss": 1.6123,
+ "step": 316
+ },
+ {
+ "epoch": 0.31708918133224967,
+ "grad_norm": 0.8339365720748901,
+ "learning_rate": 0.000194529652709525,
+ "loss": 1.9765,
+ "step": 317
+ },
+ {
+ "epoch": 0.3180894626613735,
+ "grad_norm": 0.8524260520935059,
+ "learning_rate": 0.00019449540571125286,
+ "loss": 1.6803,
+ "step": 318
+ },
+ {
+ "epoch": 0.31908974399049733,
+ "grad_norm": 0.7035975456237793,
+ "learning_rate": 0.00019446105487961926,
+ "loss": 1.5792,
+ "step": 319
+ },
+ {
+ "epoch": 0.32009002531962116,
+ "grad_norm": 0.7894249558448792,
+ "learning_rate": 0.0001944266002523696,
+ "loss": 1.6326,
+ "step": 320
+ },
+ {
+ "epoch": 0.32109030664874494,
+ "grad_norm": 0.7716989517211914,
+ "learning_rate": 0.0001943920418673633,
+ "loss": 1.6871,
+ "step": 321
+ },
+ {
+ "epoch": 0.32209058797786877,
+ "grad_norm": 0.7914933562278748,
+ "learning_rate": 0.00019435737976257377,
+ "loss": 1.7148,
+ "step": 322
+ },
+ {
+ "epoch": 0.3230908693069926,
+ "grad_norm": 0.7113205790519714,
+ "learning_rate": 0.00019432261397608834,
+ "loss": 1.5236,
+ "step": 323
+ },
+ {
+ "epoch": 0.32409115063611643,
+ "grad_norm": 0.8609917163848877,
+ "learning_rate": 0.00019428774454610843,
+ "loss": 1.8101,
+ "step": 324
+ },
+ {
+ "epoch": 0.3250914319652402,
+ "grad_norm": 0.7319685220718384,
+ "learning_rate": 0.00019425277151094913,
+ "loss": 1.7712,
+ "step": 325
+ },
+ {
+ "epoch": 0.32609171329436404,
+ "grad_norm": 0.6478747725486755,
+ "learning_rate": 0.00019421769490903957,
+ "loss": 1.8535,
+ "step": 326
+ },
+ {
+ "epoch": 0.32709199462348787,
+ "grad_norm": 0.7025763392448425,
+ "learning_rate": 0.0001941825147789225,
+ "loss": 1.9213,
+ "step": 327
+ },
+ {
+ "epoch": 0.32809227595261165,
+ "grad_norm": 0.7595239877700806,
+ "learning_rate": 0.00019414723115925456,
+ "loss": 1.7449,
+ "step": 328
+ },
+ {
+ "epoch": 0.3290925572817355,
+ "grad_norm": 0.7728105783462524,
+ "learning_rate": 0.0001941118440888061,
+ "loss": 1.8821,
+ "step": 329
+ },
+ {
+ "epoch": 0.3300928386108593,
+ "grad_norm": 0.7430977821350098,
+ "learning_rate": 0.0001940763536064611,
+ "loss": 1.6904,
+ "step": 330
+ },
+ {
+ "epoch": 0.33109311993998314,
+ "grad_norm": 0.7909367680549622,
+ "learning_rate": 0.00019404075975121716,
+ "loss": 1.7899,
+ "step": 331
+ },
+ {
+ "epoch": 0.3320934012691069,
+ "grad_norm": 0.7561226487159729,
+ "learning_rate": 0.0001940050625621855,
+ "loss": 1.7746,
+ "step": 332
+ },
+ {
+ "epoch": 0.33309368259823074,
+ "grad_norm": 0.7602452635765076,
+ "learning_rate": 0.00019396926207859084,
+ "loss": 1.7909,
+ "step": 333
+ },
+ {
+ "epoch": 0.3340939639273546,
+ "grad_norm": 0.8194379806518555,
+ "learning_rate": 0.0001939333583397715,
+ "loss": 1.7039,
+ "step": 334
+ },
+ {
+ "epoch": 0.3350942452564784,
+ "grad_norm": 0.7036342024803162,
+ "learning_rate": 0.00019389735138517915,
+ "loss": 1.6663,
+ "step": 335
+ },
+ {
+ "epoch": 0.3360945265856022,
+ "grad_norm": 0.8429521918296814,
+ "learning_rate": 0.00019386124125437895,
+ "loss": 1.589,
+ "step": 336
+ },
+ {
+ "epoch": 0.337094807914726,
+ "grad_norm": 0.7271071076393127,
+ "learning_rate": 0.00019382502798704935,
+ "loss": 1.646,
+ "step": 337
+ },
+ {
+ "epoch": 0.33809508924384984,
+ "grad_norm": 0.7862086892127991,
+ "learning_rate": 0.00019378871162298227,
+ "loss": 1.6085,
+ "step": 338
+ },
+ {
+ "epoch": 0.3390953705729737,
+ "grad_norm": 0.676815390586853,
+ "learning_rate": 0.00019375229220208276,
+ "loss": 1.7335,
+ "step": 339
+ },
+ {
+ "epoch": 0.34009565190209745,
+ "grad_norm": 0.8916042447090149,
+ "learning_rate": 0.00019371576976436917,
+ "loss": 1.7914,
+ "step": 340
+ },
+ {
+ "epoch": 0.3410959332312213,
+ "grad_norm": 0.7913751006126404,
+ "learning_rate": 0.00019367914434997312,
+ "loss": 1.6031,
+ "step": 341
+ },
+ {
+ "epoch": 0.3420962145603451,
+ "grad_norm": 0.7409866452217102,
+ "learning_rate": 0.00019364241599913924,
+ "loss": 1.6525,
+ "step": 342
+ },
+ {
+ "epoch": 0.3430964958894689,
+ "grad_norm": 0.7472705841064453,
+ "learning_rate": 0.0001936055847522254,
+ "loss": 1.6716,
+ "step": 343
+ },
+ {
+ "epoch": 0.3440967772185927,
+ "grad_norm": 0.7030773758888245,
+ "learning_rate": 0.00019356865064970244,
+ "loss": 1.7134,
+ "step": 344
+ },
+ {
+ "epoch": 0.34509705854771655,
+ "grad_norm": 0.6609564423561096,
+ "learning_rate": 0.0001935316137321543,
+ "loss": 1.7127,
+ "step": 345
+ },
+ {
+ "epoch": 0.3460973398768404,
+ "grad_norm": 0.7811393141746521,
+ "learning_rate": 0.00019349447404027782,
+ "loss": 1.75,
+ "step": 346
+ },
+ {
+ "epoch": 0.34709762120596416,
+ "grad_norm": 0.6980521082878113,
+ "learning_rate": 0.00019345723161488283,
+ "loss": 1.82,
+ "step": 347
+ },
+ {
+ "epoch": 0.348097902535088,
+ "grad_norm": 0.749796986579895,
+ "learning_rate": 0.000193419886496892,
+ "loss": 1.9755,
+ "step": 348
+ },
+ {
+ "epoch": 0.3490981838642118,
+ "grad_norm": 0.9486667513847351,
+ "learning_rate": 0.00019338243872734086,
+ "loss": 1.7047,
+ "step": 349
+ },
+ {
+ "epoch": 0.35009846519333565,
+ "grad_norm": 0.8086081147193909,
+ "learning_rate": 0.00019334488834737775,
+ "loss": 1.661,
+ "step": 350
+ },
+ {
+ "epoch": 0.3510987465224594,
+ "grad_norm": 0.700549840927124,
+ "learning_rate": 0.00019330723539826375,
+ "loss": 1.8696,
+ "step": 351
+ },
+ {
+ "epoch": 0.35209902785158326,
+ "grad_norm": 0.7465476393699646,
+ "learning_rate": 0.00019326947992137262,
+ "loss": 1.5444,
+ "step": 352
+ },
+ {
+ "epoch": 0.3530993091807071,
+ "grad_norm": 0.7370999455451965,
+ "learning_rate": 0.00019323162195819082,
+ "loss": 1.8805,
+ "step": 353
+ },
+ {
+ "epoch": 0.35409959050983086,
+ "grad_norm": 0.719359278678894,
+ "learning_rate": 0.0001931936615503174,
+ "loss": 1.8022,
+ "step": 354
+ },
+ {
+ "epoch": 0.3550998718389547,
+ "grad_norm": 0.7301434278488159,
+ "learning_rate": 0.000193155598739464,
+ "loss": 1.6984,
+ "step": 355
+ },
+ {
+ "epoch": 0.3561001531680785,
+ "grad_norm": 0.7191399335861206,
+ "learning_rate": 0.0001931174335674547,
+ "loss": 1.7229,
+ "step": 356
+ },
+ {
+ "epoch": 0.35710043449720236,
+ "grad_norm": 0.7471932768821716,
+ "learning_rate": 0.0001930791660762262,
+ "loss": 1.7408,
+ "step": 357
+ },
+ {
+ "epoch": 0.35810071582632613,
+ "grad_norm": 0.8197934031486511,
+ "learning_rate": 0.00019304079630782752,
+ "loss": 1.6938,
+ "step": 358
+ },
+ {
+ "epoch": 0.35910099715544996,
+ "grad_norm": 0.7408166527748108,
+ "learning_rate": 0.0001930023243044201,
+ "loss": 1.7798,
+ "step": 359
+ },
+ {
+ "epoch": 0.3601012784845738,
+ "grad_norm": 0.7525373101234436,
+ "learning_rate": 0.00019296375010827773,
+ "loss": 1.711,
+ "step": 360
+ },
+ {
+ "epoch": 0.3611015598136976,
+ "grad_norm": 0.6712046265602112,
+ "learning_rate": 0.00019292507376178643,
+ "loss": 1.8157,
+ "step": 361
+ },
+ {
+ "epoch": 0.3621018411428214,
+ "grad_norm": 0.6712916493415833,
+ "learning_rate": 0.00019288629530744454,
+ "loss": 1.8707,
+ "step": 362
+ },
+ {
+ "epoch": 0.36310212247194523,
+ "grad_norm": 0.6127772331237793,
+ "learning_rate": 0.0001928474147878626,
+ "loss": 1.4743,
+ "step": 363
+ },
+ {
+ "epoch": 0.36410240380106906,
+ "grad_norm": 0.910310685634613,
+ "learning_rate": 0.0001928084322457632,
+ "loss": 1.7956,
+ "step": 364
+ },
+ {
+ "epoch": 0.3651026851301929,
+ "grad_norm": 0.6267688870429993,
+ "learning_rate": 0.00019276934772398114,
+ "loss": 1.4664,
+ "step": 365
+ },
+ {
+ "epoch": 0.36610296645931667,
+ "grad_norm": 0.8317943811416626,
+ "learning_rate": 0.00019273016126546323,
+ "loss": 1.853,
+ "step": 366
+ },
+ {
+ "epoch": 0.3671032477884405,
+ "grad_norm": 0.7581344842910767,
+ "learning_rate": 0.00019269087291326833,
+ "loss": 1.9236,
+ "step": 367
+ },
+ {
+ "epoch": 0.36810352911756433,
+ "grad_norm": 0.9311390519142151,
+ "learning_rate": 0.00019265148271056722,
+ "loss": 1.7019,
+ "step": 368
+ },
+ {
+ "epoch": 0.3691038104466881,
+ "grad_norm": 0.9513958096504211,
+ "learning_rate": 0.0001926119907006426,
+ "loss": 1.7617,
+ "step": 369
+ },
+ {
+ "epoch": 0.37010409177581194,
+ "grad_norm": 0.7407613396644592,
+ "learning_rate": 0.00019257239692688907,
+ "loss": 2.1057,
+ "step": 370
+ },
+ {
+ "epoch": 0.37110437310493577,
+ "grad_norm": 0.7530227899551392,
+ "learning_rate": 0.00019253270143281296,
+ "loss": 1.9844,
+ "step": 371
+ },
+ {
+ "epoch": 0.3721046544340596,
+ "grad_norm": 0.6733037233352661,
+ "learning_rate": 0.00019249290426203252,
+ "loss": 1.8939,
+ "step": 372
+ },
+ {
+ "epoch": 0.3731049357631834,
+ "grad_norm": 0.7037007808685303,
+ "learning_rate": 0.0001924530054582776,
+ "loss": 1.6897,
+ "step": 373
+ },
+ {
+ "epoch": 0.3741052170923072,
+ "grad_norm": 0.7689145803451538,
+ "learning_rate": 0.0001924130050653898,
+ "loss": 1.6544,
+ "step": 374
+ },
+ {
+ "epoch": 0.37510549842143104,
+ "grad_norm": 0.7100968360900879,
+ "learning_rate": 0.00019237290312732226,
+ "loss": 1.7932,
+ "step": 375
+ },
+ {
+ "epoch": 0.37610577975055487,
+ "grad_norm": 0.7645193338394165,
+ "learning_rate": 0.00019233269968813984,
+ "loss": 1.6576,
+ "step": 376
+ },
+ {
+ "epoch": 0.37710606107967864,
+ "grad_norm": 0.6831678152084351,
+ "learning_rate": 0.00019229239479201876,
+ "loss": 1.6675,
+ "step": 377
+ },
+ {
+ "epoch": 0.3781063424088025,
+ "grad_norm": 0.8677794337272644,
+ "learning_rate": 0.0001922519884832469,
+ "loss": 1.6064,
+ "step": 378
+ },
+ {
+ "epoch": 0.3791066237379263,
+ "grad_norm": 0.6727691888809204,
+ "learning_rate": 0.0001922114808062234,
+ "loss": 1.6249,
+ "step": 379
+ },
+ {
+ "epoch": 0.3801069050670501,
+ "grad_norm": 0.6588670611381531,
+ "learning_rate": 0.00019217087180545893,
+ "loss": 1.5541,
+ "step": 380
+ },
+ {
+ "epoch": 0.3811071863961739,
+ "grad_norm": 0.7666369080543518,
+ "learning_rate": 0.0001921301615255754,
+ "loss": 1.7806,
+ "step": 381
+ },
+ {
+ "epoch": 0.38210746772529774,
+ "grad_norm": 0.6465156078338623,
+ "learning_rate": 0.0001920893500113061,
+ "loss": 1.512,
+ "step": 382
+ },
+ {
+ "epoch": 0.3831077490544216,
+ "grad_norm": 0.7854346632957458,
+ "learning_rate": 0.00019204843730749547,
+ "loss": 1.6857,
+ "step": 383
+ },
+ {
+ "epoch": 0.38410803038354535,
+ "grad_norm": 0.6625111103057861,
+ "learning_rate": 0.00019200742345909915,
+ "loss": 1.7033,
+ "step": 384
+ },
+ {
+ "epoch": 0.3851083117126692,
+ "grad_norm": 0.7273709177970886,
+ "learning_rate": 0.00019196630851118398,
+ "loss": 1.665,
+ "step": 385
+ },
+ {
+ "epoch": 0.386108593041793,
+ "grad_norm": 0.6861465573310852,
+ "learning_rate": 0.0001919250925089278,
+ "loss": 1.5028,
+ "step": 386
+ },
+ {
+ "epoch": 0.38710887437091684,
+ "grad_norm": 0.845456063747406,
+ "learning_rate": 0.00019188377549761963,
+ "loss": 1.967,
+ "step": 387
+ },
+ {
+ "epoch": 0.3881091557000406,
+ "grad_norm": 0.6481165289878845,
+ "learning_rate": 0.00019184235752265928,
+ "loss": 1.6053,
+ "step": 388
+ },
+ {
+ "epoch": 0.38910943702916445,
+ "grad_norm": 0.6312947273254395,
+ "learning_rate": 0.00019180083862955772,
+ "loss": 1.4427,
+ "step": 389
+ },
+ {
+ "epoch": 0.3901097183582883,
+ "grad_norm": 0.7874154448509216,
+ "learning_rate": 0.00019175921886393666,
+ "loss": 1.6099,
+ "step": 390
+ },
+ {
+ "epoch": 0.39110999968741206,
+ "grad_norm": 0.6839481592178345,
+ "learning_rate": 0.00019171749827152869,
+ "loss": 1.7004,
+ "step": 391
+ },
+ {
+ "epoch": 0.3921102810165359,
+ "grad_norm": 0.7239277362823486,
+ "learning_rate": 0.0001916756768981772,
+ "loss": 1.8813,
+ "step": 392
+ },
+ {
+ "epoch": 0.3931105623456597,
+ "grad_norm": 0.8241100311279297,
+ "learning_rate": 0.00019163375478983632,
+ "loss": 1.9443,
+ "step": 393
+ },
+ {
+ "epoch": 0.39411084367478355,
+ "grad_norm": 0.7401999235153198,
+ "learning_rate": 0.00019159173199257085,
+ "loss": 1.6663,
+ "step": 394
+ },
+ {
+ "epoch": 0.3951111250039073,
+ "grad_norm": 0.8297036290168762,
+ "learning_rate": 0.00019154960855255628,
+ "loss": 1.8012,
+ "step": 395
+ },
+ {
+ "epoch": 0.39611140633303116,
+ "grad_norm": 0.9661216735839844,
+ "learning_rate": 0.0001915073845160786,
+ "loss": 1.7007,
+ "step": 396
+ },
+ {
+ "epoch": 0.397111687662155,
+ "grad_norm": 1.4041926860809326,
+ "learning_rate": 0.00019146505992953446,
+ "loss": 1.7232,
+ "step": 397
+ },
+ {
+ "epoch": 0.3981119689912788,
+ "grad_norm": 0.8469036221504211,
+ "learning_rate": 0.00019142263483943085,
+ "loss": 1.4479,
+ "step": 398
+ },
+ {
+ "epoch": 0.3991122503204026,
+ "grad_norm": 0.9476561546325684,
+ "learning_rate": 0.00019138010929238534,
+ "loss": 1.8572,
+ "step": 399
+ },
+ {
+ "epoch": 0.4001125316495264,
+ "grad_norm": 0.7196705937385559,
+ "learning_rate": 0.00019133748333512575,
+ "loss": 1.6184,
+ "step": 400
+ },
+ {
+ "epoch": 0.40111281297865026,
+ "grad_norm": 0.8957480192184448,
+ "learning_rate": 0.00019129475701449035,
+ "loss": 1.762,
+ "step": 401
+ },
+ {
+ "epoch": 0.4021130943077741,
+ "grad_norm": 0.7850635647773743,
+ "learning_rate": 0.0001912519303774276,
+ "loss": 1.6764,
+ "step": 402
+ },
+ {
+ "epoch": 0.40311337563689786,
+ "grad_norm": 0.7579814791679382,
+ "learning_rate": 0.0001912090034709963,
+ "loss": 1.6231,
+ "step": 403
+ },
+ {
+ "epoch": 0.4041136569660217,
+ "grad_norm": 0.7173107266426086,
+ "learning_rate": 0.00019116597634236525,
+ "loss": 1.7107,
+ "step": 404
+ },
+ {
+ "epoch": 0.4051139382951455,
+ "grad_norm": 0.7832950353622437,
+ "learning_rate": 0.0001911228490388136,
+ "loss": 1.8608,
+ "step": 405
+ },
+ {
+ "epoch": 0.4061142196242693,
+ "grad_norm": 0.716299295425415,
+ "learning_rate": 0.00019107962160773035,
+ "loss": 1.652,
+ "step": 406
+ },
+ {
+ "epoch": 0.40711450095339313,
+ "grad_norm": 0.6675253510475159,
+ "learning_rate": 0.0001910362940966147,
+ "loss": 1.5963,
+ "step": 407
+ },
+ {
+ "epoch": 0.40811478228251696,
+ "grad_norm": 0.6555336713790894,
+ "learning_rate": 0.00019099286655307568,
+ "loss": 1.4991,
+ "step": 408
+ },
+ {
+ "epoch": 0.4091150636116408,
+ "grad_norm": 0.7307867407798767,
+ "learning_rate": 0.0001909493390248324,
+ "loss": 1.8221,
+ "step": 409
+ },
+ {
+ "epoch": 0.41011534494076457,
+ "grad_norm": 0.6557430624961853,
+ "learning_rate": 0.00019090571155971366,
+ "loss": 1.6484,
+ "step": 410
+ },
+ {
+ "epoch": 0.4111156262698884,
+ "grad_norm": 0.6816605925559998,
+ "learning_rate": 0.00019086198420565823,
+ "loss": 1.5052,
+ "step": 411
+ },
+ {
+ "epoch": 0.41211590759901223,
+ "grad_norm": 0.66513592004776,
+ "learning_rate": 0.00019081815701071445,
+ "loss": 1.8818,
+ "step": 412
+ },
+ {
+ "epoch": 0.41311618892813606,
+ "grad_norm": 0.6807469129562378,
+ "learning_rate": 0.0001907742300230406,
+ "loss": 1.5997,
+ "step": 413
+ },
+ {
+ "epoch": 0.41411647025725984,
+ "grad_norm": 0.8060654401779175,
+ "learning_rate": 0.00019073020329090444,
+ "loss": 1.8099,
+ "step": 414
+ },
+ {
+ "epoch": 0.41511675158638367,
+ "grad_norm": 0.7440110445022583,
+ "learning_rate": 0.0001906860768626834,
+ "loss": 1.4876,
+ "step": 415
+ },
+ {
+ "epoch": 0.4161170329155075,
+ "grad_norm": 0.7675415277481079,
+ "learning_rate": 0.00019064185078686443,
+ "loss": 1.4722,
+ "step": 416
+ },
+ {
+ "epoch": 0.4171173142446313,
+ "grad_norm": 0.6656553149223328,
+ "learning_rate": 0.000190597525112044,
+ "loss": 1.6453,
+ "step": 417
+ },
+ {
+ "epoch": 0.4181175955737551,
+ "grad_norm": 0.730689287185669,
+ "learning_rate": 0.000190553099886928,
+ "loss": 1.6584,
+ "step": 418
+ },
+ {
+ "epoch": 0.41911787690287894,
+ "grad_norm": 0.8425858616828918,
+ "learning_rate": 0.00019050857516033173,
+ "loss": 1.6249,
+ "step": 419
+ },
+ {
+ "epoch": 0.42011815823200277,
+ "grad_norm": 0.7816892266273499,
+ "learning_rate": 0.00019046395098117983,
+ "loss": 1.7532,
+ "step": 420
+ },
+ {
+ "epoch": 0.42111843956112655,
+ "grad_norm": 0.7324026823043823,
+ "learning_rate": 0.00019041922739850616,
+ "loss": 1.8523,
+ "step": 421
+ },
+ {
+ "epoch": 0.4221187208902504,
+ "grad_norm": 0.7473389506340027,
+ "learning_rate": 0.00019037440446145385,
+ "loss": 1.583,
+ "step": 422
+ },
+ {
+ "epoch": 0.4231190022193742,
+ "grad_norm": 0.8720895051956177,
+ "learning_rate": 0.00019032948221927524,
+ "loss": 1.6806,
+ "step": 423
+ },
+ {
+ "epoch": 0.42411928354849804,
+ "grad_norm": 0.728528618812561,
+ "learning_rate": 0.00019028446072133175,
+ "loss": 1.7283,
+ "step": 424
+ },
+ {
+ "epoch": 0.4251195648776218,
+ "grad_norm": 0.739930272102356,
+ "learning_rate": 0.00019023934001709383,
+ "loss": 1.7244,
+ "step": 425
+ },
+ {
+ "epoch": 0.42611984620674564,
+ "grad_norm": 0.7825399041175842,
+ "learning_rate": 0.00019019412015614098,
+ "loss": 1.7871,
+ "step": 426
+ },
+ {
+ "epoch": 0.4271201275358695,
+ "grad_norm": 0.8878734707832336,
+ "learning_rate": 0.00019014880118816164,
+ "loss": 1.6018,
+ "step": 427
+ },
+ {
+ "epoch": 0.4281204088649933,
+ "grad_norm": 0.726259708404541,
+ "learning_rate": 0.0001901033831629532,
+ "loss": 1.7732,
+ "step": 428
+ },
+ {
+ "epoch": 0.4291206901941171,
+ "grad_norm": 0.7620319724082947,
+ "learning_rate": 0.00019005786613042185,
+ "loss": 1.6466,
+ "step": 429
+ },
+ {
+ "epoch": 0.4301209715232409,
+ "grad_norm": 0.7295501828193665,
+ "learning_rate": 0.00019001225014058255,
+ "loss": 1.8708,
+ "step": 430
+ },
+ {
+ "epoch": 0.43112125285236474,
+ "grad_norm": 0.7419458031654358,
+ "learning_rate": 0.00018996653524355902,
+ "loss": 1.6583,
+ "step": 431
+ },
+ {
+ "epoch": 0.4321215341814885,
+ "grad_norm": 0.7701705098152161,
+ "learning_rate": 0.00018992072148958368,
+ "loss": 1.4421,
+ "step": 432
+ },
+ {
+ "epoch": 0.43312181551061235,
+ "grad_norm": 0.8237659931182861,
+ "learning_rate": 0.00018987480892899758,
+ "loss": 1.844,
+ "step": 433
+ },
+ {
+ "epoch": 0.4341220968397362,
+ "grad_norm": 0.6167672276496887,
+ "learning_rate": 0.00018982879761225027,
+ "loss": 1.6193,
+ "step": 434
+ },
+ {
+ "epoch": 0.43512237816886,
+ "grad_norm": 0.7565534710884094,
+ "learning_rate": 0.00018978268758989991,
+ "loss": 1.7655,
+ "step": 435
+ },
+ {
+ "epoch": 0.4361226594979838,
+ "grad_norm": 0.8333333730697632,
+ "learning_rate": 0.00018973647891261307,
+ "loss": 1.5764,
+ "step": 436
+ },
+ {
+ "epoch": 0.4371229408271076,
+ "grad_norm": 0.7404434084892273,
+ "learning_rate": 0.00018969017163116472,
+ "loss": 1.7922,
+ "step": 437
+ },
+ {
+ "epoch": 0.43812322215623145,
+ "grad_norm": 0.7129400372505188,
+ "learning_rate": 0.0001896437657964382,
+ "loss": 1.6925,
+ "step": 438
+ },
+ {
+ "epoch": 0.4391235034853553,
+ "grad_norm": 0.7750307321548462,
+ "learning_rate": 0.00018959726145942508,
+ "loss": 1.8133,
+ "step": 439
+ },
+ {
+ "epoch": 0.44012378481447906,
+ "grad_norm": 0.7244920134544373,
+ "learning_rate": 0.00018955065867122528,
+ "loss": 1.6425,
+ "step": 440
+ },
+ {
+ "epoch": 0.4411240661436029,
+ "grad_norm": 0.7624644637107849,
+ "learning_rate": 0.00018950395748304678,
+ "loss": 1.886,
+ "step": 441
+ },
+ {
+ "epoch": 0.4421243474727267,
+ "grad_norm": 0.7016286849975586,
+ "learning_rate": 0.0001894571579462058,
+ "loss": 1.7308,
+ "step": 442
+ },
+ {
+ "epoch": 0.4431246288018505,
+ "grad_norm": 0.6965353488922119,
+ "learning_rate": 0.00018941026011212654,
+ "loss": 1.5872,
+ "step": 443
+ },
+ {
+ "epoch": 0.4441249101309743,
+ "grad_norm": 0.7479360699653625,
+ "learning_rate": 0.00018936326403234125,
+ "loss": 1.8129,
+ "step": 444
+ },
+ {
+ "epoch": 0.44512519146009816,
+ "grad_norm": 0.7027686834335327,
+ "learning_rate": 0.00018931616975849006,
+ "loss": 1.6433,
+ "step": 445
+ },
+ {
+ "epoch": 0.446125472789222,
+ "grad_norm": 0.7771592140197754,
+ "learning_rate": 0.00018926897734232115,
+ "loss": 1.4645,
+ "step": 446
+ },
+ {
+ "epoch": 0.44712575411834576,
+ "grad_norm": 0.7766458988189697,
+ "learning_rate": 0.0001892216868356904,
+ "loss": 1.7873,
+ "step": 447
+ },
+ {
+ "epoch": 0.4481260354474696,
+ "grad_norm": 0.8146182298660278,
+ "learning_rate": 0.0001891742982905615,
+ "loss": 1.7935,
+ "step": 448
+ },
+ {
+ "epoch": 0.4491263167765934,
+ "grad_norm": 0.6744781136512756,
+ "learning_rate": 0.00018912681175900598,
+ "loss": 1.8916,
+ "step": 449
+ },
+ {
+ "epoch": 0.45012659810571726,
+ "grad_norm": 0.6259024143218994,
+ "learning_rate": 0.00018907922729320285,
+ "loss": 1.6616,
+ "step": 450
+ },
+ {
+ "epoch": 0.45112687943484103,
+ "grad_norm": 0.7717494368553162,
+ "learning_rate": 0.00018903154494543889,
+ "loss": 1.817,
+ "step": 451
+ },
+ {
+ "epoch": 0.45212716076396486,
+ "grad_norm": 0.648040771484375,
+ "learning_rate": 0.00018898376476810834,
+ "loss": 1.6309,
+ "step": 452
+ },
+ {
+ "epoch": 0.4531274420930887,
+ "grad_norm": 0.7560017704963684,
+ "learning_rate": 0.00018893588681371303,
+ "loss": 1.8016,
+ "step": 453
+ },
+ {
+ "epoch": 0.45412772342221247,
+ "grad_norm": 0.8778628706932068,
+ "learning_rate": 0.00018888791113486213,
+ "loss": 1.7797,
+ "step": 454
+ },
+ {
+ "epoch": 0.4551280047513363,
+ "grad_norm": 0.6795655488967896,
+ "learning_rate": 0.00018883983778427227,
+ "loss": 1.6343,
+ "step": 455
+ },
+ {
+ "epoch": 0.45612828608046013,
+ "grad_norm": 0.6690213084220886,
+ "learning_rate": 0.0001887916668147673,
+ "loss": 1.6224,
+ "step": 456
+ },
+ {
+ "epoch": 0.45712856740958396,
+ "grad_norm": 0.7529327869415283,
+ "learning_rate": 0.00018874339827927846,
+ "loss": 1.6396,
+ "step": 457
+ },
+ {
+ "epoch": 0.45812884873870774,
+ "grad_norm": 0.8393098711967468,
+ "learning_rate": 0.00018869503223084414,
+ "loss": 1.8374,
+ "step": 458
+ },
+ {
+ "epoch": 0.45912913006783157,
+ "grad_norm": 0.7435776591300964,
+ "learning_rate": 0.00018864656872260985,
+ "loss": 1.7363,
+ "step": 459
+ },
+ {
+ "epoch": 0.4601294113969554,
+ "grad_norm": 0.6737055778503418,
+ "learning_rate": 0.00018859800780782828,
+ "loss": 1.6661,
+ "step": 460
+ },
+ {
+ "epoch": 0.46112969272607923,
+ "grad_norm": 0.7674340605735779,
+ "learning_rate": 0.000188549349539859,
+ "loss": 1.6269,
+ "step": 461
+ },
+ {
+ "epoch": 0.462129974055203,
+ "grad_norm": 0.7329950928688049,
+ "learning_rate": 0.00018850059397216876,
+ "loss": 1.6989,
+ "step": 462
+ },
+ {
+ "epoch": 0.46313025538432684,
+ "grad_norm": 0.7075778841972351,
+ "learning_rate": 0.00018845174115833099,
+ "loss": 1.7286,
+ "step": 463
+ },
+ {
+ "epoch": 0.46413053671345067,
+ "grad_norm": 0.7973611354827881,
+ "learning_rate": 0.0001884027911520262,
+ "loss": 1.7478,
+ "step": 464
+ },
+ {
+ "epoch": 0.4651308180425745,
+ "grad_norm": 0.7790057063102722,
+ "learning_rate": 0.00018835374400704154,
+ "loss": 1.6659,
+ "step": 465
+ },
+ {
+ "epoch": 0.4661310993716983,
+ "grad_norm": 0.8505310416221619,
+ "learning_rate": 0.00018830459977727096,
+ "loss": 1.6879,
+ "step": 466
+ },
+ {
+ "epoch": 0.4671313807008221,
+ "grad_norm": 0.7616267800331116,
+ "learning_rate": 0.0001882553585167151,
+ "loss": 1.6525,
+ "step": 467
+ },
+ {
+ "epoch": 0.46813166202994594,
+ "grad_norm": 0.8038133978843689,
+ "learning_rate": 0.00018820602027948114,
+ "loss": 1.7929,
+ "step": 468
+ },
+ {
+ "epoch": 0.4691319433590697,
+ "grad_norm": 0.6762365698814392,
+ "learning_rate": 0.00018815658511978298,
+ "loss": 1.6926,
+ "step": 469
+ },
+ {
+ "epoch": 0.47013222468819355,
+ "grad_norm": 0.6515015959739685,
+ "learning_rate": 0.00018810705309194083,
+ "loss": 1.7817,
+ "step": 470
+ },
+ {
+ "epoch": 0.4711325060173174,
+ "grad_norm": 0.696675181388855,
+ "learning_rate": 0.00018805742425038145,
+ "loss": 1.7195,
+ "step": 471
+ },
+ {
+ "epoch": 0.4721327873464412,
+ "grad_norm": 0.7929533123970032,
+ "learning_rate": 0.00018800769864963802,
+ "loss": 2.0165,
+ "step": 472
+ },
+ {
+ "epoch": 0.473133068675565,
+ "grad_norm": 0.7223886251449585,
+ "learning_rate": 0.00018795787634434994,
+ "loss": 1.6708,
+ "step": 473
+ },
+ {
+ "epoch": 0.4741333500046888,
+ "grad_norm": 0.7982028126716614,
+ "learning_rate": 0.0001879079573892629,
+ "loss": 1.628,
+ "step": 474
+ },
+ {
+ "epoch": 0.47513363133381264,
+ "grad_norm": 0.6962152123451233,
+ "learning_rate": 0.00018785794183922883,
+ "loss": 1.6429,
+ "step": 475
+ },
+ {
+ "epoch": 0.4761339126629365,
+ "grad_norm": 0.687489926815033,
+ "learning_rate": 0.00018780782974920572,
+ "loss": 1.4546,
+ "step": 476
+ },
+ {
+ "epoch": 0.47713419399206025,
+ "grad_norm": 0.7260375022888184,
+ "learning_rate": 0.00018775762117425777,
+ "loss": 1.739,
+ "step": 477
+ },
+ {
+ "epoch": 0.4781344753211841,
+ "grad_norm": 0.759400486946106,
+ "learning_rate": 0.0001877073161695551,
+ "loss": 1.6465,
+ "step": 478
+ },
+ {
+ "epoch": 0.4791347566503079,
+ "grad_norm": 0.7412364482879639,
+ "learning_rate": 0.00018765691479037376,
+ "loss": 1.7333,
+ "step": 479
+ },
+ {
+ "epoch": 0.4801350379794317,
+ "grad_norm": 0.6909674406051636,
+ "learning_rate": 0.00018760641709209583,
+ "loss": 1.6936,
+ "step": 480
+ },
+ {
+ "epoch": 0.4811353193085555,
+ "grad_norm": 0.6478050947189331,
+ "learning_rate": 0.0001875558231302091,
+ "loss": 1.4435,
+ "step": 481
+ },
+ {
+ "epoch": 0.48213560063767935,
+ "grad_norm": 0.6662638187408447,
+ "learning_rate": 0.00018750513296030718,
+ "loss": 1.5567,
+ "step": 482
+ },
+ {
+ "epoch": 0.4831358819668032,
+ "grad_norm": 0.6973413825035095,
+ "learning_rate": 0.00018745434663808942,
+ "loss": 1.6434,
+ "step": 483
+ },
+ {
+ "epoch": 0.48413616329592696,
+ "grad_norm": 0.8381956815719604,
+ "learning_rate": 0.0001874034642193608,
+ "loss": 1.8568,
+ "step": 484
+ },
+ {
+ "epoch": 0.4851364446250508,
+ "grad_norm": 0.6522262096405029,
+ "learning_rate": 0.0001873524857600319,
+ "loss": 1.4265,
+ "step": 485
+ },
+ {
+ "epoch": 0.4861367259541746,
+ "grad_norm": 0.7018440961837769,
+ "learning_rate": 0.00018730141131611882,
+ "loss": 1.6914,
+ "step": 486
+ },
+ {
+ "epoch": 0.48713700728329845,
+ "grad_norm": 0.8237236142158508,
+ "learning_rate": 0.00018725024094374315,
+ "loss": 1.4462,
+ "step": 487
+ },
+ {
+ "epoch": 0.4881372886124222,
+ "grad_norm": 0.6507928967475891,
+ "learning_rate": 0.00018719897469913184,
+ "loss": 1.5802,
+ "step": 488
+ },
+ {
+ "epoch": 0.48913756994154606,
+ "grad_norm": 0.8120100498199463,
+ "learning_rate": 0.00018714761263861728,
+ "loss": 1.7819,
+ "step": 489
+ },
+ {
+ "epoch": 0.4901378512706699,
+ "grad_norm": 0.759722888469696,
+ "learning_rate": 0.000187096154818637,
+ "loss": 1.7481,
+ "step": 490
+ },
+ {
+ "epoch": 0.4911381325997937,
+ "grad_norm": 0.7146822214126587,
+ "learning_rate": 0.00018704460129573391,
+ "loss": 1.6217,
+ "step": 491
+ },
+ {
+ "epoch": 0.4921384139289175,
+ "grad_norm": 0.7138429284095764,
+ "learning_rate": 0.00018699295212655596,
+ "loss": 1.7242,
+ "step": 492
+ },
+ {
+ "epoch": 0.4931386952580413,
+ "grad_norm": 0.8145613670349121,
+ "learning_rate": 0.00018694120736785632,
+ "loss": 1.8759,
+ "step": 493
+ },
+ {
+ "epoch": 0.49413897658716516,
+ "grad_norm": 0.6624435186386108,
+ "learning_rate": 0.00018688936707649304,
+ "loss": 1.8632,
+ "step": 494
+ },
+ {
+ "epoch": 0.49513925791628893,
+ "grad_norm": 0.6550843119621277,
+ "learning_rate": 0.00018683743130942928,
+ "loss": 1.7645,
+ "step": 495
+ },
+ {
+ "epoch": 0.49613953924541276,
+ "grad_norm": 0.6931488513946533,
+ "learning_rate": 0.00018678540012373302,
+ "loss": 1.5899,
+ "step": 496
+ },
+ {
+ "epoch": 0.4971398205745366,
+ "grad_norm": 0.684229850769043,
+ "learning_rate": 0.00018673327357657715,
+ "loss": 1.6902,
+ "step": 497
+ },
+ {
+ "epoch": 0.4981401019036604,
+ "grad_norm": 0.7380666136741638,
+ "learning_rate": 0.0001866810517252393,
+ "loss": 1.7115,
+ "step": 498
+ },
+ {
+ "epoch": 0.4991403832327842,
+ "grad_norm": 0.6782827973365784,
+ "learning_rate": 0.00018662873462710184,
+ "loss": 1.495,
+ "step": 499
+ },
+ {
+ "epoch": 0.5001406645619081,
+ "grad_norm": 0.730248749256134,
+ "learning_rate": 0.0001865763223396518,
+ "loss": 1.5147,
+ "step": 500
+ },
+ {
+ "epoch": 0.5011409458910319,
+ "grad_norm": 0.7644149661064148,
+ "learning_rate": 0.00018652381492048083,
+ "loss": 1.7278,
+ "step": 501
+ },
+ {
+ "epoch": 0.5021412272201556,
+ "grad_norm": 0.6977668404579163,
+ "learning_rate": 0.00018647121242728506,
+ "loss": 1.5194,
+ "step": 502
+ },
+ {
+ "epoch": 0.5031415085492795,
+ "grad_norm": 0.7714502215385437,
+ "learning_rate": 0.00018641851491786512,
+ "loss": 2.0039,
+ "step": 503
+ },
+ {
+ "epoch": 0.5041417898784033,
+ "grad_norm": 0.9013757109642029,
+ "learning_rate": 0.00018636572245012606,
+ "loss": 1.8069,
+ "step": 504
+ },
+ {
+ "epoch": 0.5051420712075271,
+ "grad_norm": 0.7173192501068115,
+ "learning_rate": 0.00018631283508207725,
+ "loss": 1.841,
+ "step": 505
+ },
+ {
+ "epoch": 0.506142352536651,
+ "grad_norm": 0.7639481425285339,
+ "learning_rate": 0.00018625985287183233,
+ "loss": 1.5333,
+ "step": 506
+ },
+ {
+ "epoch": 0.5071426338657747,
+ "grad_norm": 0.8779808282852173,
+ "learning_rate": 0.00018620677587760916,
+ "loss": 1.8304,
+ "step": 507
+ },
+ {
+ "epoch": 0.5081429151948985,
+ "grad_norm": 0.8956230282783508,
+ "learning_rate": 0.00018615360415772978,
+ "loss": 1.7228,
+ "step": 508
+ },
+ {
+ "epoch": 0.5091431965240224,
+ "grad_norm": 0.8217945098876953,
+ "learning_rate": 0.00018610033777062025,
+ "loss": 1.4977,
+ "step": 509
+ },
+ {
+ "epoch": 0.5101434778531462,
+ "grad_norm": 0.7628902196884155,
+ "learning_rate": 0.0001860469767748108,
+ "loss": 1.7634,
+ "step": 510
+ },
+ {
+ "epoch": 0.5111437591822701,
+ "grad_norm": 0.635911226272583,
+ "learning_rate": 0.00018599352122893539,
+ "loss": 1.5103,
+ "step": 511
+ },
+ {
+ "epoch": 0.5121440405113938,
+ "grad_norm": 0.6439646482467651,
+ "learning_rate": 0.00018593997119173205,
+ "loss": 1.7281,
+ "step": 512
+ },
+ {
+ "epoch": 0.5131443218405176,
+ "grad_norm": 0.7753567099571228,
+ "learning_rate": 0.00018588632672204264,
+ "loss": 1.9028,
+ "step": 513
+ },
+ {
+ "epoch": 0.5141446031696415,
+ "grad_norm": 0.8296717405319214,
+ "learning_rate": 0.0001858325878788126,
+ "loss": 1.9049,
+ "step": 514
+ },
+ {
+ "epoch": 0.5151448844987653,
+ "grad_norm": 0.7379167079925537,
+ "learning_rate": 0.00018577875472109134,
+ "loss": 1.6262,
+ "step": 515
+ },
+ {
+ "epoch": 0.516145165827889,
+ "grad_norm": 0.634040355682373,
+ "learning_rate": 0.0001857248273080317,
+ "loss": 1.5416,
+ "step": 516
+ },
+ {
+ "epoch": 0.5171454471570129,
+ "grad_norm": 0.7394217252731323,
+ "learning_rate": 0.00018567080569889015,
+ "loss": 1.6035,
+ "step": 517
+ },
+ {
+ "epoch": 0.5181457284861367,
+ "grad_norm": 0.705426037311554,
+ "learning_rate": 0.00018561668995302667,
+ "loss": 1.616,
+ "step": 518
+ },
+ {
+ "epoch": 0.5191460098152605,
+ "grad_norm": 0.778021514415741,
+ "learning_rate": 0.00018556248012990468,
+ "loss": 1.5206,
+ "step": 519
+ },
+ {
+ "epoch": 0.5201462911443844,
+ "grad_norm": 0.7203211188316345,
+ "learning_rate": 0.000185508176289091,
+ "loss": 1.5369,
+ "step": 520
+ },
+ {
+ "epoch": 0.5211465724735082,
+ "grad_norm": 0.7390999794006348,
+ "learning_rate": 0.00018545377849025566,
+ "loss": 1.6438,
+ "step": 521
+ },
+ {
+ "epoch": 0.522146853802632,
+ "grad_norm": 0.6779179573059082,
+ "learning_rate": 0.0001853992867931721,
+ "loss": 1.6268,
+ "step": 522
+ },
+ {
+ "epoch": 0.5231471351317558,
+ "grad_norm": 0.6589105725288391,
+ "learning_rate": 0.00018534470125771674,
+ "loss": 1.8353,
+ "step": 523
+ },
+ {
+ "epoch": 0.5241474164608796,
+ "grad_norm": 0.692081868648529,
+ "learning_rate": 0.0001852900219438693,
+ "loss": 1.7047,
+ "step": 524
+ },
+ {
+ "epoch": 0.5251476977900035,
+ "grad_norm": 0.6639224886894226,
+ "learning_rate": 0.0001852352489117124,
+ "loss": 1.7448,
+ "step": 525
+ },
+ {
+ "epoch": 0.5261479791191273,
+ "grad_norm": 0.7168188095092773,
+ "learning_rate": 0.00018518038222143174,
+ "loss": 1.6734,
+ "step": 526
+ },
+ {
+ "epoch": 0.527148260448251,
+ "grad_norm": 0.7671873569488525,
+ "learning_rate": 0.00018512542193331583,
+ "loss": 1.9392,
+ "step": 527
+ },
+ {
+ "epoch": 0.5281485417773749,
+ "grad_norm": 0.7861583828926086,
+ "learning_rate": 0.00018507036810775615,
+ "loss": 1.5749,
+ "step": 528
+ },
+ {
+ "epoch": 0.5291488231064987,
+ "grad_norm": 0.6727952361106873,
+ "learning_rate": 0.00018501522080524688,
+ "loss": 1.7584,
+ "step": 529
+ },
+ {
+ "epoch": 0.5301491044356225,
+ "grad_norm": 0.7287748456001282,
+ "learning_rate": 0.0001849599800863849,
+ "loss": 1.783,
+ "step": 530
+ },
+ {
+ "epoch": 0.5311493857647464,
+ "grad_norm": 0.6883361339569092,
+ "learning_rate": 0.0001849046460118698,
+ "loss": 1.6104,
+ "step": 531
+ },
+ {
+ "epoch": 0.5321496670938701,
+ "grad_norm": 0.6767789125442505,
+ "learning_rate": 0.0001848492186425037,
+ "loss": 1.8416,
+ "step": 532
+ },
+ {
+ "epoch": 0.533149948422994,
+ "grad_norm": 0.7468088269233704,
+ "learning_rate": 0.0001847936980391913,
+ "loss": 1.8297,
+ "step": 533
+ },
+ {
+ "epoch": 0.5341502297521178,
+ "grad_norm": 0.7560007572174072,
+ "learning_rate": 0.00018473808426293964,
+ "loss": 1.5405,
+ "step": 534
+ },
+ {
+ "epoch": 0.5351505110812416,
+ "grad_norm": 0.6900463104248047,
+ "learning_rate": 0.00018468237737485823,
+ "loss": 1.5117,
+ "step": 535
+ },
+ {
+ "epoch": 0.5361507924103655,
+ "grad_norm": 0.8691229820251465,
+ "learning_rate": 0.00018462657743615888,
+ "loss": 1.724,
+ "step": 536
+ },
+ {
+ "epoch": 0.5371510737394892,
+ "grad_norm": 0.7081372141838074,
+ "learning_rate": 0.00018457068450815562,
+ "loss": 1.6418,
+ "step": 537
+ },
+ {
+ "epoch": 0.538151355068613,
+ "grad_norm": 0.8148525953292847,
+ "learning_rate": 0.00018451469865226464,
+ "loss": 1.8564,
+ "step": 538
+ },
+ {
+ "epoch": 0.5391516363977369,
+ "grad_norm": 0.7306076288223267,
+ "learning_rate": 0.00018445861993000436,
+ "loss": 1.4974,
+ "step": 539
+ },
+ {
+ "epoch": 0.5401519177268607,
+ "grad_norm": 0.815825343132019,
+ "learning_rate": 0.00018440244840299506,
+ "loss": 1.8965,
+ "step": 540
+ },
+ {
+ "epoch": 0.5411521990559846,
+ "grad_norm": 0.753034234046936,
+ "learning_rate": 0.0001843461841329591,
+ "loss": 2.016,
+ "step": 541
+ },
+ {
+ "epoch": 0.5421524803851083,
+ "grad_norm": 0.8658022284507751,
+ "learning_rate": 0.0001842898271817208,
+ "loss": 1.6697,
+ "step": 542
+ },
+ {
+ "epoch": 0.5431527617142321,
+ "grad_norm": 0.7143135666847229,
+ "learning_rate": 0.00018423337761120618,
+ "loss": 1.8741,
+ "step": 543
+ },
+ {
+ "epoch": 0.544153043043356,
+ "grad_norm": 0.6843370795249939,
+ "learning_rate": 0.00018417683548344318,
+ "loss": 1.763,
+ "step": 544
+ },
+ {
+ "epoch": 0.5451533243724798,
+ "grad_norm": 0.6699584126472473,
+ "learning_rate": 0.00018412020086056133,
+ "loss": 1.7126,
+ "step": 545
+ },
+ {
+ "epoch": 0.5461536057016035,
+ "grad_norm": 0.6921600699424744,
+ "learning_rate": 0.0001840634738047918,
+ "loss": 1.6697,
+ "step": 546
+ },
+ {
+ "epoch": 0.5471538870307274,
+ "grad_norm": 0.822501003742218,
+ "learning_rate": 0.0001840066543784675,
+ "loss": 1.7045,
+ "step": 547
+ },
+ {
+ "epoch": 0.5481541683598512,
+ "grad_norm": 0.7563886046409607,
+ "learning_rate": 0.00018394974264402257,
+ "loss": 1.6853,
+ "step": 548
+ },
+ {
+ "epoch": 0.549154449688975,
+ "grad_norm": 0.7408218383789062,
+ "learning_rate": 0.00018389273866399275,
+ "loss": 1.6496,
+ "step": 549
+ },
+ {
+ "epoch": 0.5501547310180989,
+ "grad_norm": 0.6454717516899109,
+ "learning_rate": 0.00018383564250101512,
+ "loss": 1.5063,
+ "step": 550
+ },
+ {
+ "epoch": 0.5511550123472226,
+ "grad_norm": 0.7033074498176575,
+ "learning_rate": 0.000183778454217828,
+ "loss": 1.6432,
+ "step": 551
+ },
+ {
+ "epoch": 0.5521552936763465,
+ "grad_norm": 0.768194854259491,
+ "learning_rate": 0.0001837211738772711,
+ "loss": 2.0594,
+ "step": 552
+ },
+ {
+ "epoch": 0.5531555750054703,
+ "grad_norm": 0.7805166244506836,
+ "learning_rate": 0.000183663801542285,
+ "loss": 1.4317,
+ "step": 553
+ },
+ {
+ "epoch": 0.5541558563345941,
+ "grad_norm": 0.6603556871414185,
+ "learning_rate": 0.00018360633727591155,
+ "loss": 1.4171,
+ "step": 554
+ },
+ {
+ "epoch": 0.555156137663718,
+ "grad_norm": 0.6996607780456543,
+ "learning_rate": 0.00018354878114129367,
+ "loss": 1.6832,
+ "step": 555
+ },
+ {
+ "epoch": 0.5561564189928417,
+ "grad_norm": 0.7861623167991638,
+ "learning_rate": 0.00018349113320167504,
+ "loss": 1.8425,
+ "step": 556
+ },
+ {
+ "epoch": 0.5571567003219655,
+ "grad_norm": 0.8387210369110107,
+ "learning_rate": 0.00018343339352040042,
+ "loss": 2.1272,
+ "step": 557
+ },
+ {
+ "epoch": 0.5581569816510894,
+ "grad_norm": 0.829555094242096,
+ "learning_rate": 0.00018337556216091517,
+ "loss": 1.4835,
+ "step": 558
+ },
+ {
+ "epoch": 0.5591572629802132,
+ "grad_norm": 0.7824863791465759,
+ "learning_rate": 0.00018331763918676556,
+ "loss": 1.8893,
+ "step": 559
+ },
+ {
+ "epoch": 0.560157544309337,
+ "grad_norm": 0.707683801651001,
+ "learning_rate": 0.00018325962466159848,
+ "loss": 1.6492,
+ "step": 560
+ },
+ {
+ "epoch": 0.5611578256384608,
+ "grad_norm": 0.775600254535675,
+ "learning_rate": 0.00018320151864916135,
+ "loss": 1.6542,
+ "step": 561
+ },
+ {
+ "epoch": 0.5621581069675846,
+ "grad_norm": 0.7602002024650574,
+ "learning_rate": 0.00018314332121330225,
+ "loss": 1.9625,
+ "step": 562
+ },
+ {
+ "epoch": 0.5631583882967085,
+ "grad_norm": 0.7535431385040283,
+ "learning_rate": 0.0001830850324179695,
+ "loss": 1.6407,
+ "step": 563
+ },
+ {
+ "epoch": 0.5641586696258323,
+ "grad_norm": 1.1884644031524658,
+ "learning_rate": 0.00018302665232721208,
+ "loss": 1.6188,
+ "step": 564
+ },
+ {
+ "epoch": 0.565158950954956,
+ "grad_norm": 0.7576595544815063,
+ "learning_rate": 0.0001829681810051791,
+ "loss": 1.7739,
+ "step": 565
+ },
+ {
+ "epoch": 0.5661592322840799,
+ "grad_norm": 0.6807442307472229,
+ "learning_rate": 0.00018290961851611995,
+ "loss": 1.6244,
+ "step": 566
+ },
+ {
+ "epoch": 0.5671595136132037,
+ "grad_norm": 0.7222456932067871,
+ "learning_rate": 0.00018285096492438424,
+ "loss": 1.7809,
+ "step": 567
+ },
+ {
+ "epoch": 0.5681597949423275,
+ "grad_norm": 0.7002213597297668,
+ "learning_rate": 0.00018279222029442163,
+ "loss": 1.5462,
+ "step": 568
+ },
+ {
+ "epoch": 0.5691600762714514,
+ "grad_norm": 0.8433569669723511,
+ "learning_rate": 0.00018273338469078186,
+ "loss": 1.5042,
+ "step": 569
+ },
+ {
+ "epoch": 0.5701603576005752,
+ "grad_norm": 0.663144588470459,
+ "learning_rate": 0.00018267445817811466,
+ "loss": 1.7133,
+ "step": 570
+ },
+ {
+ "epoch": 0.5711606389296989,
+ "grad_norm": 0.7298465967178345,
+ "learning_rate": 0.00018261544082116954,
+ "loss": 2.0201,
+ "step": 571
+ },
+ {
+ "epoch": 0.5721609202588228,
+ "grad_norm": 0.7613754868507385,
+ "learning_rate": 0.00018255633268479595,
+ "loss": 1.8065,
+ "step": 572
+ },
+ {
+ "epoch": 0.5731612015879466,
+ "grad_norm": 0.7252177596092224,
+ "learning_rate": 0.00018249713383394303,
+ "loss": 1.5715,
+ "step": 573
+ },
+ {
+ "epoch": 0.5741614829170705,
+ "grad_norm": 0.783961832523346,
+ "learning_rate": 0.0001824378443336596,
+ "loss": 1.7102,
+ "step": 574
+ },
+ {
+ "epoch": 0.5751617642461943,
+ "grad_norm": 0.8532115817070007,
+ "learning_rate": 0.00018237846424909413,
+ "loss": 1.7011,
+ "step": 575
+ },
+ {
+ "epoch": 0.576162045575318,
+ "grad_norm": 0.7841559052467346,
+ "learning_rate": 0.00018231899364549455,
+ "loss": 1.6397,
+ "step": 576
+ },
+ {
+ "epoch": 0.5771623269044419,
+ "grad_norm": 0.7118051648139954,
+ "learning_rate": 0.00018225943258820833,
+ "loss": 1.7166,
+ "step": 577
+ },
+ {
+ "epoch": 0.5781626082335657,
+ "grad_norm": 0.7298933863639832,
+ "learning_rate": 0.00018219978114268227,
+ "loss": 1.604,
+ "step": 578
+ },
+ {
+ "epoch": 0.5791628895626895,
+ "grad_norm": 0.6644678711891174,
+ "learning_rate": 0.00018214003937446253,
+ "loss": 1.7673,
+ "step": 579
+ },
+ {
+ "epoch": 0.5801631708918134,
+ "grad_norm": 0.6707085371017456,
+ "learning_rate": 0.00018208020734919455,
+ "loss": 1.662,
+ "step": 580
+ },
+ {
+ "epoch": 0.5811634522209371,
+ "grad_norm": 0.7431498765945435,
+ "learning_rate": 0.00018202028513262288,
+ "loss": 1.7757,
+ "step": 581
+ },
+ {
+ "epoch": 0.5821637335500609,
+ "grad_norm": 0.6936060190200806,
+ "learning_rate": 0.00018196027279059117,
+ "loss": 1.8464,
+ "step": 582
+ },
+ {
+ "epoch": 0.5831640148791848,
+ "grad_norm": 0.6512508988380432,
+ "learning_rate": 0.00018190017038904215,
+ "loss": 1.5537,
+ "step": 583
+ },
+ {
+ "epoch": 0.5841642962083086,
+ "grad_norm": 0.7541425228118896,
+ "learning_rate": 0.0001818399779940175,
+ "loss": 1.8552,
+ "step": 584
+ },
+ {
+ "epoch": 0.5851645775374325,
+ "grad_norm": 0.720447301864624,
+ "learning_rate": 0.0001817796956716578,
+ "loss": 1.5392,
+ "step": 585
+ },
+ {
+ "epoch": 0.5861648588665562,
+ "grad_norm": 0.7526831030845642,
+ "learning_rate": 0.00018171932348820234,
+ "loss": 1.8224,
+ "step": 586
+ },
+ {
+ "epoch": 0.58716514019568,
+ "grad_norm": 0.6906039714813232,
+ "learning_rate": 0.0001816588615099893,
+ "loss": 1.6498,
+ "step": 587
+ },
+ {
+ "epoch": 0.5881654215248039,
+ "grad_norm": 0.726737380027771,
+ "learning_rate": 0.00018159830980345548,
+ "loss": 1.5377,
+ "step": 588
+ },
+ {
+ "epoch": 0.5891657028539277,
+ "grad_norm": 0.6792006492614746,
+ "learning_rate": 0.0001815376684351362,
+ "loss": 1.8905,
+ "step": 589
+ },
+ {
+ "epoch": 0.5901659841830514,
+ "grad_norm": 0.7885284423828125,
+ "learning_rate": 0.00018147693747166534,
+ "loss": 1.709,
+ "step": 590
+ },
+ {
+ "epoch": 0.5911662655121753,
+ "grad_norm": 0.7270367741584778,
+ "learning_rate": 0.00018141611697977529,
+ "loss": 1.7973,
+ "step": 591
+ },
+ {
+ "epoch": 0.5921665468412991,
+ "grad_norm": 0.7852438688278198,
+ "learning_rate": 0.00018135520702629675,
+ "loss": 1.6312,
+ "step": 592
+ },
+ {
+ "epoch": 0.5931668281704229,
+ "grad_norm": 0.7881343960762024,
+ "learning_rate": 0.0001812942076781588,
+ "loss": 1.7581,
+ "step": 593
+ },
+ {
+ "epoch": 0.5941671094995468,
+ "grad_norm": 0.7581279277801514,
+ "learning_rate": 0.0001812331190023886,
+ "loss": 1.6811,
+ "step": 594
+ },
+ {
+ "epoch": 0.5951673908286705,
+ "grad_norm": 0.7250573039054871,
+ "learning_rate": 0.0001811719410661116,
+ "loss": 1.6835,
+ "step": 595
+ },
+ {
+ "epoch": 0.5961676721577944,
+ "grad_norm": 0.7383652329444885,
+ "learning_rate": 0.00018111067393655132,
+ "loss": 1.7804,
+ "step": 596
+ },
+ {
+ "epoch": 0.5971679534869182,
+ "grad_norm": 0.6631885170936584,
+ "learning_rate": 0.0001810493176810292,
+ "loss": 1.477,
+ "step": 597
+ },
+ {
+ "epoch": 0.598168234816042,
+ "grad_norm": 0.6705698370933533,
+ "learning_rate": 0.00018098787236696474,
+ "loss": 1.5939,
+ "step": 598
+ },
+ {
+ "epoch": 0.5991685161451659,
+ "grad_norm": 0.6646862626075745,
+ "learning_rate": 0.00018092633806187513,
+ "loss": 1.5903,
+ "step": 599
+ },
+ {
+ "epoch": 0.6001687974742896,
+ "grad_norm": 0.7267604470252991,
+ "learning_rate": 0.0001808647148333755,
+ "loss": 1.6864,
+ "step": 600
+ },
+ {
+ "epoch": 0.6011690788034134,
+ "grad_norm": 0.672102689743042,
+ "learning_rate": 0.00018080300274917862,
+ "loss": 1.73,
+ "step": 601
+ },
+ {
+ "epoch": 0.6021693601325373,
+ "grad_norm": 0.7541230320930481,
+ "learning_rate": 0.00018074120187709495,
+ "loss": 1.7824,
+ "step": 602
+ },
+ {
+ "epoch": 0.6031696414616611,
+ "grad_norm": 0.6589316129684448,
+ "learning_rate": 0.00018067931228503246,
+ "loss": 1.7007,
+ "step": 603
+ },
+ {
+ "epoch": 0.604169922790785,
+ "grad_norm": 0.7083007097244263,
+ "learning_rate": 0.00018061733404099655,
+ "loss": 1.7703,
+ "step": 604
+ },
+ {
+ "epoch": 0.6051702041199087,
+ "grad_norm": 0.6700689196586609,
+ "learning_rate": 0.00018055526721309016,
+ "loss": 1.5967,
+ "step": 605
+ },
+ {
+ "epoch": 0.6061704854490325,
+ "grad_norm": 0.6766354441642761,
+ "learning_rate": 0.0001804931118695135,
+ "loss": 1.6208,
+ "step": 606
+ },
+ {
+ "epoch": 0.6071707667781564,
+ "grad_norm": 0.8214102387428284,
+ "learning_rate": 0.00018043086807856403,
+ "loss": 1.6084,
+ "step": 607
+ },
+ {
+ "epoch": 0.6081710481072802,
+ "grad_norm": 0.736492395401001,
+ "learning_rate": 0.00018036853590863648,
+ "loss": 1.7254,
+ "step": 608
+ },
+ {
+ "epoch": 0.609171329436404,
+ "grad_norm": 0.6791033148765564,
+ "learning_rate": 0.00018030611542822257,
+ "loss": 1.5837,
+ "step": 609
+ },
+ {
+ "epoch": 0.6101716107655278,
+ "grad_norm": 0.6344060301780701,
+ "learning_rate": 0.00018024360670591114,
+ "loss": 1.5407,
+ "step": 610
+ },
+ {
+ "epoch": 0.6111718920946516,
+ "grad_norm": 0.9026575088500977,
+ "learning_rate": 0.00018018100981038798,
+ "loss": 1.6748,
+ "step": 611
+ },
+ {
+ "epoch": 0.6121721734237754,
+ "grad_norm": 0.8029866814613342,
+ "learning_rate": 0.00018011832481043576,
+ "loss": 1.7547,
+ "step": 612
+ },
+ {
+ "epoch": 0.6131724547528993,
+ "grad_norm": 0.8065117597579956,
+ "learning_rate": 0.00018005555177493394,
+ "loss": 1.8051,
+ "step": 613
+ },
+ {
+ "epoch": 0.614172736082023,
+ "grad_norm": 0.7858480215072632,
+ "learning_rate": 0.00017999269077285875,
+ "loss": 1.6728,
+ "step": 614
+ },
+ {
+ "epoch": 0.6151730174111469,
+ "grad_norm": 0.6735272407531738,
+ "learning_rate": 0.00017992974187328305,
+ "loss": 1.8585,
+ "step": 615
+ },
+ {
+ "epoch": 0.6161732987402707,
+ "grad_norm": 0.7518951892852783,
+ "learning_rate": 0.00017986670514537627,
+ "loss": 1.5429,
+ "step": 616
+ },
+ {
+ "epoch": 0.6171735800693945,
+ "grad_norm": 0.6952928900718689,
+ "learning_rate": 0.00017980358065840444,
+ "loss": 1.5982,
+ "step": 617
+ },
+ {
+ "epoch": 0.6181738613985184,
+ "grad_norm": 0.8996840119361877,
+ "learning_rate": 0.0001797403684817299,
+ "loss": 1.8164,
+ "step": 618
+ },
+ {
+ "epoch": 0.6191741427276422,
+ "grad_norm": 0.7645425200462341,
+ "learning_rate": 0.00017967706868481144,
+ "loss": 1.8373,
+ "step": 619
+ },
+ {
+ "epoch": 0.6201744240567659,
+ "grad_norm": 0.8479064106941223,
+ "learning_rate": 0.00017961368133720407,
+ "loss": 1.6483,
+ "step": 620
+ },
+ {
+ "epoch": 0.6211747053858898,
+ "grad_norm": 0.7806827425956726,
+ "learning_rate": 0.000179550206508559,
+ "loss": 1.78,
+ "step": 621
+ },
+ {
+ "epoch": 0.6221749867150136,
+ "grad_norm": 0.6476775407791138,
+ "learning_rate": 0.00017948664426862364,
+ "loss": 1.7712,
+ "step": 622
+ },
+ {
+ "epoch": 0.6231752680441374,
+ "grad_norm": 0.8421279788017273,
+ "learning_rate": 0.00017942299468724134,
+ "loss": 1.7753,
+ "step": 623
+ },
+ {
+ "epoch": 0.6241755493732613,
+ "grad_norm": 0.6706071496009827,
+ "learning_rate": 0.0001793592578343515,
+ "loss": 1.4093,
+ "step": 624
+ },
+ {
+ "epoch": 0.625175830702385,
+ "grad_norm": 0.8224231004714966,
+ "learning_rate": 0.0001792954337799894,
+ "loss": 1.7343,
+ "step": 625
+ },
+ {
+ "epoch": 0.6261761120315089,
+ "grad_norm": 0.8398690819740295,
+ "learning_rate": 0.00017923152259428612,
+ "loss": 1.8017,
+ "step": 626
+ },
+ {
+ "epoch": 0.6271763933606327,
+ "grad_norm": 0.6664738059043884,
+ "learning_rate": 0.00017916752434746856,
+ "loss": 1.6023,
+ "step": 627
+ },
+ {
+ "epoch": 0.6281766746897565,
+ "grad_norm": 0.9246477484703064,
+ "learning_rate": 0.0001791034391098591,
+ "loss": 1.7862,
+ "step": 628
+ },
+ {
+ "epoch": 0.6291769560188804,
+ "grad_norm": 0.797835111618042,
+ "learning_rate": 0.00017903926695187595,
+ "loss": 1.6059,
+ "step": 629
+ },
+ {
+ "epoch": 0.6301772373480041,
+ "grad_norm": 0.613727331161499,
+ "learning_rate": 0.0001789750079440326,
+ "loss": 1.5086,
+ "step": 630
+ },
+ {
+ "epoch": 0.6311775186771279,
+ "grad_norm": 0.7127765417098999,
+ "learning_rate": 0.00017891066215693817,
+ "loss": 1.5985,
+ "step": 631
+ },
+ {
+ "epoch": 0.6321778000062518,
+ "grad_norm": 0.6923073530197144,
+ "learning_rate": 0.00017884622966129695,
+ "loss": 1.5537,
+ "step": 632
+ },
+ {
+ "epoch": 0.6331780813353756,
+ "grad_norm": 0.7015733122825623,
+ "learning_rate": 0.00017878171052790868,
+ "loss": 1.7782,
+ "step": 633
+ },
+ {
+ "epoch": 0.6341783626644993,
+ "grad_norm": 0.6932784914970398,
+ "learning_rate": 0.00017871710482766817,
+ "loss": 1.4294,
+ "step": 634
+ },
+ {
+ "epoch": 0.6351786439936232,
+ "grad_norm": 0.7054254412651062,
+ "learning_rate": 0.00017865241263156546,
+ "loss": 1.7227,
+ "step": 635
+ },
+ {
+ "epoch": 0.636178925322747,
+ "grad_norm": 0.6994242072105408,
+ "learning_rate": 0.0001785876340106855,
+ "loss": 1.5998,
+ "step": 636
+ },
+ {
+ "epoch": 0.6371792066518709,
+ "grad_norm": 0.81461501121521,
+ "learning_rate": 0.0001785227690362083,
+ "loss": 1.7212,
+ "step": 637
+ },
+ {
+ "epoch": 0.6381794879809947,
+ "grad_norm": 0.943434476852417,
+ "learning_rate": 0.00017845781777940878,
+ "loss": 1.5926,
+ "step": 638
+ },
+ {
+ "epoch": 0.6391797693101184,
+ "grad_norm": 0.8455945253372192,
+ "learning_rate": 0.00017839278031165658,
+ "loss": 1.8511,
+ "step": 639
+ },
+ {
+ "epoch": 0.6401800506392423,
+ "grad_norm": 0.9348243474960327,
+ "learning_rate": 0.00017832765670441612,
+ "loss": 1.6293,
+ "step": 640
+ },
+ {
+ "epoch": 0.6411803319683661,
+ "grad_norm": 0.746127724647522,
+ "learning_rate": 0.0001782624470292465,
+ "loss": 1.4903,
+ "step": 641
+ },
+ {
+ "epoch": 0.6421806132974899,
+ "grad_norm": 0.6215783357620239,
+ "learning_rate": 0.0001781971513578013,
+ "loss": 1.7806,
+ "step": 642
+ },
+ {
+ "epoch": 0.6431808946266138,
+ "grad_norm": 0.7447994947433472,
+ "learning_rate": 0.00017813176976182873,
+ "loss": 1.7475,
+ "step": 643
+ },
+ {
+ "epoch": 0.6441811759557375,
+ "grad_norm": 0.6916540265083313,
+ "learning_rate": 0.00017806630231317127,
+ "loss": 1.6401,
+ "step": 644
+ },
+ {
+ "epoch": 0.6451814572848613,
+ "grad_norm": 0.7208524942398071,
+ "learning_rate": 0.00017800074908376584,
+ "loss": 1.7524,
+ "step": 645
+ },
+ {
+ "epoch": 0.6461817386139852,
+ "grad_norm": 0.7548331618309021,
+ "learning_rate": 0.00017793511014564358,
+ "loss": 1.5644,
+ "step": 646
+ },
+ {
+ "epoch": 0.647182019943109,
+ "grad_norm": 0.7919667959213257,
+ "learning_rate": 0.00017786938557092983,
+ "loss": 1.6758,
+ "step": 647
+ },
+ {
+ "epoch": 0.6481823012722329,
+ "grad_norm": 0.700618326663971,
+ "learning_rate": 0.00017780357543184397,
+ "loss": 1.5213,
+ "step": 648
+ },
+ {
+ "epoch": 0.6491825826013566,
+ "grad_norm": 0.6646535992622375,
+ "learning_rate": 0.00017773767980069945,
+ "loss": 1.6487,
+ "step": 649
+ },
+ {
+ "epoch": 0.6501828639304804,
+ "grad_norm": 0.6486669182777405,
+ "learning_rate": 0.0001776716987499037,
+ "loss": 1.6556,
+ "step": 650
+ },
+ {
+ "epoch": 0.6511831452596043,
+ "grad_norm": 0.657747745513916,
+ "learning_rate": 0.0001776056323519579,
+ "loss": 1.5943,
+ "step": 651
+ },
+ {
+ "epoch": 0.6521834265887281,
+ "grad_norm": 0.7777379751205444,
+ "learning_rate": 0.00017753948067945712,
+ "loss": 1.6069,
+ "step": 652
+ },
+ {
+ "epoch": 0.6531837079178519,
+ "grad_norm": 0.772153913974762,
+ "learning_rate": 0.00017747324380509006,
+ "loss": 1.7065,
+ "step": 653
+ },
+ {
+ "epoch": 0.6541839892469757,
+ "grad_norm": 0.6984367966651917,
+ "learning_rate": 0.00017740692180163908,
+ "loss": 1.7122,
+ "step": 654
+ },
+ {
+ "epoch": 0.6551842705760995,
+ "grad_norm": 0.8033855557441711,
+ "learning_rate": 0.00017734051474198003,
+ "loss": 1.6095,
+ "step": 655
+ },
+ {
+ "epoch": 0.6561845519052233,
+ "grad_norm": 0.7568691372871399,
+ "learning_rate": 0.0001772740226990823,
+ "loss": 1.6783,
+ "step": 656
+ },
+ {
+ "epoch": 0.6571848332343472,
+ "grad_norm": 0.7288162708282471,
+ "learning_rate": 0.00017720744574600863,
+ "loss": 1.695,
+ "step": 657
+ },
+ {
+ "epoch": 0.658185114563471,
+ "grad_norm": 0.6898120045661926,
+ "learning_rate": 0.00017714078395591502,
+ "loss": 1.6539,
+ "step": 658
+ },
+ {
+ "epoch": 0.6591853958925948,
+ "grad_norm": 0.6977367997169495,
+ "learning_rate": 0.00017707403740205071,
+ "loss": 1.4558,
+ "step": 659
+ },
+ {
+ "epoch": 0.6601856772217186,
+ "grad_norm": 0.6594682335853577,
+ "learning_rate": 0.00017700720615775812,
+ "loss": 1.56,
+ "step": 660
+ },
+ {
+ "epoch": 0.6611859585508424,
+ "grad_norm": 0.6146736741065979,
+ "learning_rate": 0.0001769402902964727,
+ "loss": 1.7014,
+ "step": 661
+ },
+ {
+ "epoch": 0.6621862398799663,
+ "grad_norm": 0.7182234525680542,
+ "learning_rate": 0.00017687328989172288,
+ "loss": 1.5655,
+ "step": 662
+ },
+ {
+ "epoch": 0.66318652120909,
+ "grad_norm": 0.6940692067146301,
+ "learning_rate": 0.00017680620501712996,
+ "loss": 1.6177,
+ "step": 663
+ },
+ {
+ "epoch": 0.6641868025382138,
+ "grad_norm": 0.7672961950302124,
+ "learning_rate": 0.00017673903574640814,
+ "loss": 1.559,
+ "step": 664
+ },
+ {
+ "epoch": 0.6651870838673377,
+ "grad_norm": 0.654500424861908,
+ "learning_rate": 0.00017667178215336423,
+ "loss": 1.5024,
+ "step": 665
+ },
+ {
+ "epoch": 0.6661873651964615,
+ "grad_norm": 0.8137261867523193,
+ "learning_rate": 0.0001766044443118978,
+ "loss": 1.7865,
+ "step": 666
+ },
+ {
+ "epoch": 0.6671876465255854,
+ "grad_norm": 0.806624710559845,
+ "learning_rate": 0.000176537022296001,
+ "loss": 1.4944,
+ "step": 667
+ },
+ {
+ "epoch": 0.6681879278547092,
+ "grad_norm": 0.7952747941017151,
+ "learning_rate": 0.00017646951617975837,
+ "loss": 1.5371,
+ "step": 668
+ },
+ {
+ "epoch": 0.6691882091838329,
+ "grad_norm": 0.6380738615989685,
+ "learning_rate": 0.00017640192603734692,
+ "loss": 1.3117,
+ "step": 669
+ },
+ {
+ "epoch": 0.6701884905129568,
+ "grad_norm": 0.6559002995491028,
+ "learning_rate": 0.00017633425194303606,
+ "loss": 1.3662,
+ "step": 670
+ },
+ {
+ "epoch": 0.6711887718420806,
+ "grad_norm": 0.715826153755188,
+ "learning_rate": 0.00017626649397118734,
+ "loss": 1.7271,
+ "step": 671
+ },
+ {
+ "epoch": 0.6721890531712044,
+ "grad_norm": 0.6719872355461121,
+ "learning_rate": 0.00017619865219625452,
+ "loss": 1.747,
+ "step": 672
+ },
+ {
+ "epoch": 0.6731893345003283,
+ "grad_norm": 0.6901715397834778,
+ "learning_rate": 0.00017613072669278343,
+ "loss": 1.6438,
+ "step": 673
+ },
+ {
+ "epoch": 0.674189615829452,
+ "grad_norm": 0.6601479649543762,
+ "learning_rate": 0.00017606271753541192,
+ "loss": 1.8191,
+ "step": 674
+ },
+ {
+ "epoch": 0.6751898971585758,
+ "grad_norm": 0.8059187531471252,
+ "learning_rate": 0.00017599462479886974,
+ "loss": 1.6946,
+ "step": 675
+ },
+ {
+ "epoch": 0.6761901784876997,
+ "grad_norm": 0.6966856718063354,
+ "learning_rate": 0.00017592644855797854,
+ "loss": 1.5551,
+ "step": 676
+ },
+ {
+ "epoch": 0.6771904598168235,
+ "grad_norm": 0.7306144833564758,
+ "learning_rate": 0.00017585818888765168,
+ "loss": 1.5429,
+ "step": 677
+ },
+ {
+ "epoch": 0.6781907411459474,
+ "grad_norm": 0.572907030582428,
+ "learning_rate": 0.0001757898458628941,
+ "loss": 1.4437,
+ "step": 678
+ },
+ {
+ "epoch": 0.6791910224750711,
+ "grad_norm": 0.6807466149330139,
+ "learning_rate": 0.00017572141955880252,
+ "loss": 1.6307,
+ "step": 679
+ },
+ {
+ "epoch": 0.6801913038041949,
+ "grad_norm": 0.7529204487800598,
+ "learning_rate": 0.00017565291005056504,
+ "loss": 1.631,
+ "step": 680
+ },
+ {
+ "epoch": 0.6811915851333188,
+ "grad_norm": 0.6292940378189087,
+ "learning_rate": 0.00017558431741346122,
+ "loss": 1.7512,
+ "step": 681
+ },
+ {
+ "epoch": 0.6821918664624426,
+ "grad_norm": 0.7981480956077576,
+ "learning_rate": 0.00017551564172286197,
+ "loss": 1.7704,
+ "step": 682
+ },
+ {
+ "epoch": 0.6831921477915663,
+ "grad_norm": 0.7816259860992432,
+ "learning_rate": 0.00017544688305422943,
+ "loss": 1.4954,
+ "step": 683
+ },
+ {
+ "epoch": 0.6841924291206902,
+ "grad_norm": 0.6866456866264343,
+ "learning_rate": 0.00017537804148311695,
+ "loss": 1.7986,
+ "step": 684
+ },
+ {
+ "epoch": 0.685192710449814,
+ "grad_norm": 0.7499064803123474,
+ "learning_rate": 0.00017530911708516902,
+ "loss": 1.6472,
+ "step": 685
+ },
+ {
+ "epoch": 0.6861929917789378,
+ "grad_norm": 0.5923457145690918,
+ "learning_rate": 0.00017524010993612098,
+ "loss": 1.4866,
+ "step": 686
+ },
+ {
+ "epoch": 0.6871932731080617,
+ "grad_norm": 0.6991822719573975,
+ "learning_rate": 0.00017517102011179933,
+ "loss": 1.605,
+ "step": 687
+ },
+ {
+ "epoch": 0.6881935544371854,
+ "grad_norm": 0.7880247235298157,
+ "learning_rate": 0.0001751018476881212,
+ "loss": 1.641,
+ "step": 688
+ },
+ {
+ "epoch": 0.6891938357663093,
+ "grad_norm": 0.7848097085952759,
+ "learning_rate": 0.00017503259274109464,
+ "loss": 1.7505,
+ "step": 689
+ },
+ {
+ "epoch": 0.6901941170954331,
+ "grad_norm": 0.693678081035614,
+ "learning_rate": 0.00017496325534681825,
+ "loss": 1.6565,
+ "step": 690
+ },
+ {
+ "epoch": 0.6911943984245569,
+ "grad_norm": 0.8232877254486084,
+ "learning_rate": 0.00017489383558148136,
+ "loss": 1.7664,
+ "step": 691
+ },
+ {
+ "epoch": 0.6921946797536808,
+ "grad_norm": 0.7834855914115906,
+ "learning_rate": 0.00017482433352136365,
+ "loss": 1.4381,
+ "step": 692
+ },
+ {
+ "epoch": 0.6931949610828045,
+ "grad_norm": 0.6186713576316833,
+ "learning_rate": 0.00017475474924283536,
+ "loss": 1.6482,
+ "step": 693
+ },
+ {
+ "epoch": 0.6941952424119283,
+ "grad_norm": 0.7511133551597595,
+ "learning_rate": 0.00017468508282235704,
+ "loss": 1.6186,
+ "step": 694
+ },
+ {
+ "epoch": 0.6951955237410522,
+ "grad_norm": 0.8017745614051819,
+ "learning_rate": 0.00017461533433647946,
+ "loss": 1.6597,
+ "step": 695
+ },
+ {
+ "epoch": 0.696195805070176,
+ "grad_norm": 0.8190794587135315,
+ "learning_rate": 0.00017454550386184362,
+ "loss": 1.6602,
+ "step": 696
+ },
+ {
+ "epoch": 0.6971960863992998,
+ "grad_norm": 0.7479042410850525,
+ "learning_rate": 0.00017447559147518055,
+ "loss": 1.77,
+ "step": 697
+ },
+ {
+ "epoch": 0.6981963677284236,
+ "grad_norm": 0.7239962816238403,
+ "learning_rate": 0.00017440559725331135,
+ "loss": 1.5838,
+ "step": 698
+ },
+ {
+ "epoch": 0.6991966490575474,
+ "grad_norm": 0.7252762317657471,
+ "learning_rate": 0.000174335521273147,
+ "loss": 1.5462,
+ "step": 699
+ },
+ {
+ "epoch": 0.7001969303866713,
+ "grad_norm": 0.9383960962295532,
+ "learning_rate": 0.00017426536361168834,
+ "loss": 1.5104,
+ "step": 700
+ },
+ {
+ "epoch": 0.7011972117157951,
+ "grad_norm": 0.6944159269332886,
+ "learning_rate": 0.00017419512434602594,
+ "loss": 1.6382,
+ "step": 701
+ },
+ {
+ "epoch": 0.7021974930449189,
+ "grad_norm": 0.6809273362159729,
+ "learning_rate": 0.00017412480355334005,
+ "loss": 1.725,
+ "step": 702
+ },
+ {
+ "epoch": 0.7031977743740427,
+ "grad_norm": 0.7521125674247742,
+ "learning_rate": 0.00017405440131090048,
+ "loss": 1.8499,
+ "step": 703
+ },
+ {
+ "epoch": 0.7041980557031665,
+ "grad_norm": 0.6854100227355957,
+ "learning_rate": 0.00017398391769606658,
+ "loss": 1.6648,
+ "step": 704
+ },
+ {
+ "epoch": 0.7051983370322903,
+ "grad_norm": 0.7382327318191528,
+ "learning_rate": 0.00017391335278628712,
+ "loss": 1.5806,
+ "step": 705
+ },
+ {
+ "epoch": 0.7061986183614142,
+ "grad_norm": 0.7387582063674927,
+ "learning_rate": 0.00017384270665910014,
+ "loss": 1.5563,
+ "step": 706
+ },
+ {
+ "epoch": 0.707198899690538,
+ "grad_norm": 0.7698972821235657,
+ "learning_rate": 0.000173771979392133,
+ "loss": 1.6626,
+ "step": 707
+ },
+ {
+ "epoch": 0.7081991810196617,
+ "grad_norm": 0.7639899849891663,
+ "learning_rate": 0.00017370117106310214,
+ "loss": 1.6725,
+ "step": 708
+ },
+ {
+ "epoch": 0.7091994623487856,
+ "grad_norm": 0.6684393286705017,
+ "learning_rate": 0.0001736302817498131,
+ "loss": 1.64,
+ "step": 709
+ },
+ {
+ "epoch": 0.7101997436779094,
+ "grad_norm": 0.6329504251480103,
+ "learning_rate": 0.00017355931153016044,
+ "loss": 1.4472,
+ "step": 710
+ },
+ {
+ "epoch": 0.7112000250070333,
+ "grad_norm": 0.8133587837219238,
+ "learning_rate": 0.0001734882604821276,
+ "loss": 1.7971,
+ "step": 711
+ },
+ {
+ "epoch": 0.712200306336157,
+ "grad_norm": 0.6524143218994141,
+ "learning_rate": 0.0001734171286837868,
+ "loss": 1.5366,
+ "step": 712
+ },
+ {
+ "epoch": 0.7132005876652808,
+ "grad_norm": 0.6714311242103577,
+ "learning_rate": 0.00017334591621329906,
+ "loss": 1.841,
+ "step": 713
+ },
+ {
+ "epoch": 0.7142008689944047,
+ "grad_norm": 0.6690782904624939,
+ "learning_rate": 0.00017327462314891402,
+ "loss": 1.623,
+ "step": 714
+ },
+ {
+ "epoch": 0.7152011503235285,
+ "grad_norm": 0.650442361831665,
+ "learning_rate": 0.00017320324956896977,
+ "loss": 1.6124,
+ "step": 715
+ },
+ {
+ "epoch": 0.7162014316526523,
+ "grad_norm": 0.7075713276863098,
+ "learning_rate": 0.00017313179555189306,
+ "loss": 1.5154,
+ "step": 716
+ },
+ {
+ "epoch": 0.7172017129817762,
+ "grad_norm": 0.729060173034668,
+ "learning_rate": 0.00017306026117619889,
+ "loss": 1.7072,
+ "step": 717
+ },
+ {
+ "epoch": 0.7182019943108999,
+ "grad_norm": 0.8547433614730835,
+ "learning_rate": 0.0001729886465204906,
+ "loss": 1.6237,
+ "step": 718
+ },
+ {
+ "epoch": 0.7192022756400237,
+ "grad_norm": 0.6729336380958557,
+ "learning_rate": 0.0001729169516634598,
+ "loss": 1.7769,
+ "step": 719
+ },
+ {
+ "epoch": 0.7202025569691476,
+ "grad_norm": 0.7437167167663574,
+ "learning_rate": 0.0001728451766838861,
+ "loss": 1.5056,
+ "step": 720
+ },
+ {
+ "epoch": 0.7212028382982714,
+ "grad_norm": 0.6573147177696228,
+ "learning_rate": 0.00017277332166063726,
+ "loss": 1.7694,
+ "step": 721
+ },
+ {
+ "epoch": 0.7222031196273953,
+ "grad_norm": 0.6767126321792603,
+ "learning_rate": 0.00017270138667266894,
+ "loss": 1.6014,
+ "step": 722
+ },
+ {
+ "epoch": 0.723203400956519,
+ "grad_norm": 0.7488179206848145,
+ "learning_rate": 0.00017262937179902472,
+ "loss": 1.573,
+ "step": 723
+ },
+ {
+ "epoch": 0.7242036822856428,
+ "grad_norm": 0.6491002440452576,
+ "learning_rate": 0.00017255727711883588,
+ "loss": 1.6705,
+ "step": 724
+ },
+ {
+ "epoch": 0.7252039636147667,
+ "grad_norm": 0.764090359210968,
+ "learning_rate": 0.00017248510271132144,
+ "loss": 1.6761,
+ "step": 725
+ },
+ {
+ "epoch": 0.7262042449438905,
+ "grad_norm": 0.7116997838020325,
+ "learning_rate": 0.00017241284865578802,
+ "loss": 1.7435,
+ "step": 726
+ },
+ {
+ "epoch": 0.7272045262730142,
+ "grad_norm": 0.6367645859718323,
+ "learning_rate": 0.00017234051503162978,
+ "loss": 1.7061,
+ "step": 727
+ },
+ {
+ "epoch": 0.7282048076021381,
+ "grad_norm": 0.7232155203819275,
+ "learning_rate": 0.0001722681019183283,
+ "loss": 1.8142,
+ "step": 728
+ },
+ {
+ "epoch": 0.7292050889312619,
+ "grad_norm": 0.7533649802207947,
+ "learning_rate": 0.00017219560939545246,
+ "loss": 1.8202,
+ "step": 729
+ },
+ {
+ "epoch": 0.7302053702603858,
+ "grad_norm": 0.6923018097877502,
+ "learning_rate": 0.00017212303754265843,
+ "loss": 1.4925,
+ "step": 730
+ },
+ {
+ "epoch": 0.7312056515895096,
+ "grad_norm": 0.7326932549476624,
+ "learning_rate": 0.0001720503864396896,
+ "loss": 1.5192,
+ "step": 731
+ },
+ {
+ "epoch": 0.7322059329186333,
+ "grad_norm": 0.7220762968063354,
+ "learning_rate": 0.00017197765616637636,
+ "loss": 1.7601,
+ "step": 732
+ },
+ {
+ "epoch": 0.7332062142477572,
+ "grad_norm": 0.605725884437561,
+ "learning_rate": 0.0001719048468026361,
+ "loss": 1.6309,
+ "step": 733
+ },
+ {
+ "epoch": 0.734206495576881,
+ "grad_norm": 0.6728388667106628,
+ "learning_rate": 0.00017183195842847322,
+ "loss": 1.5993,
+ "step": 734
+ },
+ {
+ "epoch": 0.7352067769060048,
+ "grad_norm": 0.7035244703292847,
+ "learning_rate": 0.0001717589911239788,
+ "loss": 1.6031,
+ "step": 735
+ },
+ {
+ "epoch": 0.7362070582351287,
+ "grad_norm": 0.7473010420799255,
+ "learning_rate": 0.00017168594496933074,
+ "loss": 1.5833,
+ "step": 736
+ },
+ {
+ "epoch": 0.7372073395642524,
+ "grad_norm": 0.6310701370239258,
+ "learning_rate": 0.00017161282004479351,
+ "loss": 1.4328,
+ "step": 737
+ },
+ {
+ "epoch": 0.7382076208933762,
+ "grad_norm": 0.6805673837661743,
+ "learning_rate": 0.0001715396164307182,
+ "loss": 1.5429,
+ "step": 738
+ },
+ {
+ "epoch": 0.7392079022225001,
+ "grad_norm": 0.747222900390625,
+ "learning_rate": 0.0001714663342075424,
+ "loss": 1.7696,
+ "step": 739
+ },
+ {
+ "epoch": 0.7402081835516239,
+ "grad_norm": 0.8214403390884399,
+ "learning_rate": 0.00017139297345578994,
+ "loss": 1.5997,
+ "step": 740
+ },
+ {
+ "epoch": 0.7412084648807478,
+ "grad_norm": 0.6722521781921387,
+ "learning_rate": 0.00017131953425607104,
+ "loss": 1.5287,
+ "step": 741
+ },
+ {
+ "epoch": 0.7422087462098715,
+ "grad_norm": 0.6937971115112305,
+ "learning_rate": 0.00017124601668908212,
+ "loss": 1.7263,
+ "step": 742
+ },
+ {
+ "epoch": 0.7432090275389953,
+ "grad_norm": 0.7590844631195068,
+ "learning_rate": 0.00017117242083560568,
+ "loss": 1.7263,
+ "step": 743
+ },
+ {
+ "epoch": 0.7442093088681192,
+ "grad_norm": 0.7913306355476379,
+ "learning_rate": 0.00017109874677651024,
+ "loss": 1.7646,
+ "step": 744
+ },
+ {
+ "epoch": 0.745209590197243,
+ "grad_norm": 0.7123669385910034,
+ "learning_rate": 0.0001710249945927503,
+ "loss": 1.6768,
+ "step": 745
+ },
+ {
+ "epoch": 0.7462098715263668,
+ "grad_norm": 0.8426288366317749,
+ "learning_rate": 0.00017095116436536612,
+ "loss": 1.8496,
+ "step": 746
+ },
+ {
+ "epoch": 0.7472101528554906,
+ "grad_norm": 0.6152015328407288,
+ "learning_rate": 0.00017087725617548385,
+ "loss": 1.4527,
+ "step": 747
+ },
+ {
+ "epoch": 0.7482104341846144,
+ "grad_norm": 0.8348223567008972,
+ "learning_rate": 0.00017080327010431513,
+ "loss": 1.4847,
+ "step": 748
+ },
+ {
+ "epoch": 0.7492107155137382,
+ "grad_norm": 0.7883800268173218,
+ "learning_rate": 0.00017072920623315734,
+ "loss": 1.5941,
+ "step": 749
+ },
+ {
+ "epoch": 0.7502109968428621,
+ "grad_norm": 0.6957768201828003,
+ "learning_rate": 0.00017065506464339326,
+ "loss": 1.7543,
+ "step": 750
+ },
+ {
+ "epoch": 0.7512112781719859,
+ "grad_norm": 0.5898700952529907,
+ "learning_rate": 0.00017058084541649106,
+ "loss": 1.7859,
+ "step": 751
+ },
+ {
+ "epoch": 0.7522115595011097,
+ "grad_norm": 0.6882239580154419,
+ "learning_rate": 0.00017050654863400429,
+ "loss": 1.3233,
+ "step": 752
+ },
+ {
+ "epoch": 0.7532118408302335,
+ "grad_norm": 0.7327316999435425,
+ "learning_rate": 0.00017043217437757164,
+ "loss": 1.5067,
+ "step": 753
+ },
+ {
+ "epoch": 0.7542121221593573,
+ "grad_norm": 0.9257964491844177,
+ "learning_rate": 0.00017035772272891702,
+ "loss": 1.503,
+ "step": 754
+ },
+ {
+ "epoch": 0.7552124034884812,
+ "grad_norm": 0.7924116253852844,
+ "learning_rate": 0.00017028319376984928,
+ "loss": 1.8975,
+ "step": 755
+ },
+ {
+ "epoch": 0.756212684817605,
+ "grad_norm": 0.6651099920272827,
+ "learning_rate": 0.00017020858758226229,
+ "loss": 1.649,
+ "step": 756
+ },
+ {
+ "epoch": 0.7572129661467287,
+ "grad_norm": 0.7257362604141235,
+ "learning_rate": 0.0001701339042481347,
+ "loss": 1.6919,
+ "step": 757
+ },
+ {
+ "epoch": 0.7582132474758526,
+ "grad_norm": 0.8733739852905273,
+ "learning_rate": 0.00017005914384953007,
+ "loss": 1.5929,
+ "step": 758
+ },
+ {
+ "epoch": 0.7592135288049764,
+ "grad_norm": 0.6347383856773376,
+ "learning_rate": 0.00016998430646859654,
+ "loss": 1.3341,
+ "step": 759
+ },
+ {
+ "epoch": 0.7602138101341002,
+ "grad_norm": 0.6915012001991272,
+ "learning_rate": 0.00016990939218756683,
+ "loss": 1.4971,
+ "step": 760
+ },
+ {
+ "epoch": 0.761214091463224,
+ "grad_norm": 0.7862069606781006,
+ "learning_rate": 0.0001698344010887582,
+ "loss": 1.7468,
+ "step": 761
+ },
+ {
+ "epoch": 0.7622143727923478,
+ "grad_norm": 0.7318029403686523,
+ "learning_rate": 0.0001697593332545723,
+ "loss": 1.8143,
+ "step": 762
+ },
+ {
+ "epoch": 0.7632146541214717,
+ "grad_norm": 0.6758155226707458,
+ "learning_rate": 0.0001696841887674951,
+ "loss": 1.6652,
+ "step": 763
+ },
+ {
+ "epoch": 0.7642149354505955,
+ "grad_norm": 0.6853237748146057,
+ "learning_rate": 0.00016960896771009684,
+ "loss": 1.5176,
+ "step": 764
+ },
+ {
+ "epoch": 0.7652152167797193,
+ "grad_norm": 0.9686934351921082,
+ "learning_rate": 0.00016953367016503182,
+ "loss": 1.5366,
+ "step": 765
+ },
+ {
+ "epoch": 0.7662154981088432,
+ "grad_norm": 0.7232028841972351,
+ "learning_rate": 0.00016945829621503838,
+ "loss": 1.6932,
+ "step": 766
+ },
+ {
+ "epoch": 0.7672157794379669,
+ "grad_norm": 0.6606596112251282,
+ "learning_rate": 0.00016938284594293897,
+ "loss": 1.7051,
+ "step": 767
+ },
+ {
+ "epoch": 0.7682160607670907,
+ "grad_norm": 0.6337714195251465,
+ "learning_rate": 0.00016930731943163972,
+ "loss": 1.6505,
+ "step": 768
+ },
+ {
+ "epoch": 0.7692163420962146,
+ "grad_norm": 0.6292264461517334,
+ "learning_rate": 0.00016923171676413063,
+ "loss": 1.7207,
+ "step": 769
+ },
+ {
+ "epoch": 0.7702166234253384,
+ "grad_norm": 0.7183407545089722,
+ "learning_rate": 0.00016915603802348535,
+ "loss": 1.7025,
+ "step": 770
+ },
+ {
+ "epoch": 0.7712169047544621,
+ "grad_norm": 0.805107593536377,
+ "learning_rate": 0.00016908028329286112,
+ "loss": 1.592,
+ "step": 771
+ },
+ {
+ "epoch": 0.772217186083586,
+ "grad_norm": 0.725777804851532,
+ "learning_rate": 0.0001690044526554987,
+ "loss": 1.6714,
+ "step": 772
+ },
+ {
+ "epoch": 0.7732174674127098,
+ "grad_norm": 0.6801775097846985,
+ "learning_rate": 0.00016892854619472223,
+ "loss": 1.5047,
+ "step": 773
+ },
+ {
+ "epoch": 0.7742177487418337,
+ "grad_norm": 0.7701449990272522,
+ "learning_rate": 0.00016885256399393924,
+ "loss": 1.5506,
+ "step": 774
+ },
+ {
+ "epoch": 0.7752180300709575,
+ "grad_norm": 0.6954746842384338,
+ "learning_rate": 0.00016877650613664034,
+ "loss": 1.4859,
+ "step": 775
+ },
+ {
+ "epoch": 0.7762183114000812,
+ "grad_norm": 0.7431885004043579,
+ "learning_rate": 0.00016870037270639942,
+ "loss": 1.6087,
+ "step": 776
+ },
+ {
+ "epoch": 0.7772185927292051,
+ "grad_norm": 0.687329113483429,
+ "learning_rate": 0.0001686241637868734,
+ "loss": 1.7038,
+ "step": 777
+ },
+ {
+ "epoch": 0.7782188740583289,
+ "grad_norm": 0.6656787395477295,
+ "learning_rate": 0.00016854787946180198,
+ "loss": 1.5691,
+ "step": 778
+ },
+ {
+ "epoch": 0.7792191553874527,
+ "grad_norm": 0.7476064562797546,
+ "learning_rate": 0.00016847151981500789,
+ "loss": 1.4972,
+ "step": 779
+ },
+ {
+ "epoch": 0.7802194367165766,
+ "grad_norm": 0.7320332527160645,
+ "learning_rate": 0.00016839508493039657,
+ "loss": 1.7326,
+ "step": 780
+ },
+ {
+ "epoch": 0.7812197180457003,
+ "grad_norm": 0.6432293057441711,
+ "learning_rate": 0.00016831857489195618,
+ "loss": 1.542,
+ "step": 781
+ },
+ {
+ "epoch": 0.7822199993748241,
+ "grad_norm": 0.6751729846000671,
+ "learning_rate": 0.00016824198978375736,
+ "loss": 1.6864,
+ "step": 782
+ },
+ {
+ "epoch": 0.783220280703948,
+ "grad_norm": 0.770193338394165,
+ "learning_rate": 0.00016816532968995328,
+ "loss": 1.5318,
+ "step": 783
+ },
+ {
+ "epoch": 0.7842205620330718,
+ "grad_norm": 0.6820619106292725,
+ "learning_rate": 0.0001680885946947796,
+ "loss": 1.6004,
+ "step": 784
+ },
+ {
+ "epoch": 0.7852208433621957,
+ "grad_norm": 0.9120951294898987,
+ "learning_rate": 0.00016801178488255413,
+ "loss": 1.6506,
+ "step": 785
+ },
+ {
+ "epoch": 0.7862211246913194,
+ "grad_norm": 0.7819542288780212,
+ "learning_rate": 0.00016793490033767698,
+ "loss": 1.5292,
+ "step": 786
+ },
+ {
+ "epoch": 0.7872214060204432,
+ "grad_norm": 0.6647278666496277,
+ "learning_rate": 0.00016785794114463037,
+ "loss": 1.5941,
+ "step": 787
+ },
+ {
+ "epoch": 0.7882216873495671,
+ "grad_norm": 0.6874713897705078,
+ "learning_rate": 0.00016778090738797853,
+ "loss": 1.5543,
+ "step": 788
+ },
+ {
+ "epoch": 0.7892219686786909,
+ "grad_norm": 0.7759424448013306,
+ "learning_rate": 0.00016770379915236766,
+ "loss": 1.6788,
+ "step": 789
+ },
+ {
+ "epoch": 0.7902222500078147,
+ "grad_norm": 0.724583625793457,
+ "learning_rate": 0.00016762661652252567,
+ "loss": 1.5998,
+ "step": 790
+ },
+ {
+ "epoch": 0.7912225313369385,
+ "grad_norm": 0.7921720743179321,
+ "learning_rate": 0.00016754935958326244,
+ "loss": 1.5956,
+ "step": 791
+ },
+ {
+ "epoch": 0.7922228126660623,
+ "grad_norm": 0.6484968662261963,
+ "learning_rate": 0.00016747202841946928,
+ "loss": 1.5708,
+ "step": 792
+ },
+ {
+ "epoch": 0.7932230939951862,
+ "grad_norm": 0.6372153759002686,
+ "learning_rate": 0.00016739462311611919,
+ "loss": 1.5213,
+ "step": 793
+ },
+ {
+ "epoch": 0.79422337532431,
+ "grad_norm": 0.7025095224380493,
+ "learning_rate": 0.00016731714375826657,
+ "loss": 1.4701,
+ "step": 794
+ },
+ {
+ "epoch": 0.7952236566534338,
+ "grad_norm": 0.681094765663147,
+ "learning_rate": 0.00016723959043104728,
+ "loss": 1.5101,
+ "step": 795
+ },
+ {
+ "epoch": 0.7962239379825576,
+ "grad_norm": 0.7129995822906494,
+ "learning_rate": 0.00016716196321967832,
+ "loss": 1.6038,
+ "step": 796
+ },
+ {
+ "epoch": 0.7972242193116814,
+ "grad_norm": 0.7403759360313416,
+ "learning_rate": 0.00016708426220945802,
+ "loss": 1.5906,
+ "step": 797
+ },
+ {
+ "epoch": 0.7982245006408052,
+ "grad_norm": 0.6562372446060181,
+ "learning_rate": 0.00016700648748576574,
+ "loss": 1.6469,
+ "step": 798
+ },
+ {
+ "epoch": 0.7992247819699291,
+ "grad_norm": 0.839885413646698,
+ "learning_rate": 0.0001669286391340618,
+ "loss": 1.5385,
+ "step": 799
+ },
+ {
+ "epoch": 0.8002250632990529,
+ "grad_norm": 0.8687535524368286,
+ "learning_rate": 0.00016685071723988748,
+ "loss": 1.6759,
+ "step": 800
+ },
+ {
+ "epoch": 0.8012253446281766,
+ "grad_norm": 0.6825409531593323,
+ "learning_rate": 0.00016677272188886483,
+ "loss": 1.841,
+ "step": 801
+ },
+ {
+ "epoch": 0.8022256259573005,
+ "grad_norm": 0.6831037402153015,
+ "learning_rate": 0.00016669465316669667,
+ "loss": 1.5476,
+ "step": 802
+ },
+ {
+ "epoch": 0.8032259072864243,
+ "grad_norm": 0.6906002759933472,
+ "learning_rate": 0.00016661651115916642,
+ "loss": 1.6866,
+ "step": 803
+ },
+ {
+ "epoch": 0.8042261886155482,
+ "grad_norm": 0.7675560116767883,
+ "learning_rate": 0.00016653829595213794,
+ "loss": 1.5663,
+ "step": 804
+ },
+ {
+ "epoch": 0.805226469944672,
+ "grad_norm": 0.6594063639640808,
+ "learning_rate": 0.00016646000763155568,
+ "loss": 1.5247,
+ "step": 805
+ },
+ {
+ "epoch": 0.8062267512737957,
+ "grad_norm": 0.7470384836196899,
+ "learning_rate": 0.00016638164628344425,
+ "loss": 1.6468,
+ "step": 806
+ },
+ {
+ "epoch": 0.8072270326029196,
+ "grad_norm": 0.6874479651451111,
+ "learning_rate": 0.00016630321199390867,
+ "loss": 1.5948,
+ "step": 807
+ },
+ {
+ "epoch": 0.8082273139320434,
+ "grad_norm": 0.7301204204559326,
+ "learning_rate": 0.00016622470484913406,
+ "loss": 1.3922,
+ "step": 808
+ },
+ {
+ "epoch": 0.8092275952611672,
+ "grad_norm": 0.6781039834022522,
+ "learning_rate": 0.00016614612493538551,
+ "loss": 1.6054,
+ "step": 809
+ },
+ {
+ "epoch": 0.810227876590291,
+ "grad_norm": 0.6913226246833801,
+ "learning_rate": 0.00016606747233900815,
+ "loss": 1.5754,
+ "step": 810
+ },
+ {
+ "epoch": 0.8112281579194148,
+ "grad_norm": 0.667425811290741,
+ "learning_rate": 0.00016598874714642697,
+ "loss": 1.8492,
+ "step": 811
+ },
+ {
+ "epoch": 0.8122284392485386,
+ "grad_norm": 0.7662241458892822,
+ "learning_rate": 0.00016590994944414678,
+ "loss": 1.8034,
+ "step": 812
+ },
+ {
+ "epoch": 0.8132287205776625,
+ "grad_norm": 0.7574827075004578,
+ "learning_rate": 0.00016583107931875192,
+ "loss": 1.7435,
+ "step": 813
+ },
+ {
+ "epoch": 0.8142290019067863,
+ "grad_norm": 0.9005519151687622,
+ "learning_rate": 0.0001657521368569064,
+ "loss": 1.6769,
+ "step": 814
+ },
+ {
+ "epoch": 0.8152292832359102,
+ "grad_norm": 0.6895585656166077,
+ "learning_rate": 0.0001656731221453537,
+ "loss": 1.7562,
+ "step": 815
+ },
+ {
+ "epoch": 0.8162295645650339,
+ "grad_norm": 0.7573346495628357,
+ "learning_rate": 0.00016559403527091675,
+ "loss": 1.4748,
+ "step": 816
+ },
+ {
+ "epoch": 0.8172298458941577,
+ "grad_norm": 0.7698647975921631,
+ "learning_rate": 0.0001655148763204977,
+ "loss": 1.6174,
+ "step": 817
+ },
+ {
+ "epoch": 0.8182301272232816,
+ "grad_norm": 0.7975410223007202,
+ "learning_rate": 0.00016543564538107797,
+ "loss": 1.7924,
+ "step": 818
+ },
+ {
+ "epoch": 0.8192304085524054,
+ "grad_norm": 0.9687625169754028,
+ "learning_rate": 0.00016535634253971794,
+ "loss": 1.7725,
+ "step": 819
+ },
+ {
+ "epoch": 0.8202306898815291,
+ "grad_norm": 0.6777274012565613,
+ "learning_rate": 0.00016527696788355714,
+ "loss": 1.5018,
+ "step": 820
+ },
+ {
+ "epoch": 0.821230971210653,
+ "grad_norm": 0.6990464329719543,
+ "learning_rate": 0.00016519752149981397,
+ "loss": 1.5804,
+ "step": 821
+ },
+ {
+ "epoch": 0.8222312525397768,
+ "grad_norm": 0.8445940613746643,
+ "learning_rate": 0.0001651180034757856,
+ "loss": 1.8591,
+ "step": 822
+ },
+ {
+ "epoch": 0.8232315338689006,
+ "grad_norm": 0.8462644815444946,
+ "learning_rate": 0.00016503841389884798,
+ "loss": 1.7582,
+ "step": 823
+ },
+ {
+ "epoch": 0.8242318151980245,
+ "grad_norm": 0.7679311037063599,
+ "learning_rate": 0.00016495875285645566,
+ "loss": 1.5971,
+ "step": 824
+ },
+ {
+ "epoch": 0.8252320965271482,
+ "grad_norm": 0.7734447717666626,
+ "learning_rate": 0.00016487902043614173,
+ "loss": 1.714,
+ "step": 825
+ },
+ {
+ "epoch": 0.8262323778562721,
+ "grad_norm": 0.7890239953994751,
+ "learning_rate": 0.0001647992167255177,
+ "loss": 1.6876,
+ "step": 826
+ },
+ {
+ "epoch": 0.8272326591853959,
+ "grad_norm": 0.8530203104019165,
+ "learning_rate": 0.0001647193418122734,
+ "loss": 1.9096,
+ "step": 827
+ },
+ {
+ "epoch": 0.8282329405145197,
+ "grad_norm": 0.7828260064125061,
+ "learning_rate": 0.00016463939578417692,
+ "loss": 1.5518,
+ "step": 828
+ },
+ {
+ "epoch": 0.8292332218436436,
+ "grad_norm": 0.7015512585639954,
+ "learning_rate": 0.0001645593787290745,
+ "loss": 1.49,
+ "step": 829
+ },
+ {
+ "epoch": 0.8302335031727673,
+ "grad_norm": 0.694771409034729,
+ "learning_rate": 0.0001644792907348904,
+ "loss": 1.5506,
+ "step": 830
+ },
+ {
+ "epoch": 0.8312337845018911,
+ "grad_norm": 0.8167857527732849,
+ "learning_rate": 0.00016439913188962685,
+ "loss": 1.7798,
+ "step": 831
+ },
+ {
+ "epoch": 0.832234065831015,
+ "grad_norm": 0.6682108044624329,
+ "learning_rate": 0.0001643189022813639,
+ "loss": 1.6107,
+ "step": 832
+ },
+ {
+ "epoch": 0.8332343471601388,
+ "grad_norm": 0.8347259163856506,
+ "learning_rate": 0.0001642386019982594,
+ "loss": 1.7672,
+ "step": 833
+ },
+ {
+ "epoch": 0.8342346284892626,
+ "grad_norm": 0.6620945334434509,
+ "learning_rate": 0.00016415823112854883,
+ "loss": 1.6975,
+ "step": 834
+ },
+ {
+ "epoch": 0.8352349098183864,
+ "grad_norm": 0.7286327481269836,
+ "learning_rate": 0.00016407778976054526,
+ "loss": 1.5956,
+ "step": 835
+ },
+ {
+ "epoch": 0.8362351911475102,
+ "grad_norm": 0.6344440579414368,
+ "learning_rate": 0.0001639972779826392,
+ "loss": 1.6455,
+ "step": 836
+ },
+ {
+ "epoch": 0.8372354724766341,
+ "grad_norm": 0.6607793569564819,
+ "learning_rate": 0.0001639166958832985,
+ "loss": 1.6739,
+ "step": 837
+ },
+ {
+ "epoch": 0.8382357538057579,
+ "grad_norm": 0.6973574161529541,
+ "learning_rate": 0.00016383604355106837,
+ "loss": 1.8042,
+ "step": 838
+ },
+ {
+ "epoch": 0.8392360351348817,
+ "grad_norm": 0.7744210958480835,
+ "learning_rate": 0.00016375532107457108,
+ "loss": 1.528,
+ "step": 839
+ },
+ {
+ "epoch": 0.8402363164640055,
+ "grad_norm": 0.6944973468780518,
+ "learning_rate": 0.00016367452854250603,
+ "loss": 1.5498,
+ "step": 840
+ },
+ {
+ "epoch": 0.8412365977931293,
+ "grad_norm": 0.6730696558952332,
+ "learning_rate": 0.00016359366604364972,
+ "loss": 1.5849,
+ "step": 841
+ },
+ {
+ "epoch": 0.8422368791222531,
+ "grad_norm": 0.7051465511322021,
+ "learning_rate": 0.00016351273366685526,
+ "loss": 1.5972,
+ "step": 842
+ },
+ {
+ "epoch": 0.843237160451377,
+ "grad_norm": 0.7309426069259644,
+ "learning_rate": 0.00016343173150105278,
+ "loss": 1.4612,
+ "step": 843
+ },
+ {
+ "epoch": 0.8442374417805008,
+ "grad_norm": 0.7830431461334229,
+ "learning_rate": 0.00016335065963524897,
+ "loss": 1.7208,
+ "step": 844
+ },
+ {
+ "epoch": 0.8452377231096245,
+ "grad_norm": 0.8609834909439087,
+ "learning_rate": 0.0001632695181585272,
+ "loss": 1.8229,
+ "step": 845
+ },
+ {
+ "epoch": 0.8462380044387484,
+ "grad_norm": 0.7489060759544373,
+ "learning_rate": 0.00016318830716004722,
+ "loss": 1.6955,
+ "step": 846
+ },
+ {
+ "epoch": 0.8472382857678722,
+ "grad_norm": 0.636900782585144,
+ "learning_rate": 0.00016310702672904528,
+ "loss": 1.6664,
+ "step": 847
+ },
+ {
+ "epoch": 0.8482385670969961,
+ "grad_norm": 0.6423529386520386,
+ "learning_rate": 0.00016302567695483382,
+ "loss": 1.5356,
+ "step": 848
+ },
+ {
+ "epoch": 0.8492388484261199,
+ "grad_norm": 0.7380033731460571,
+ "learning_rate": 0.0001629442579268016,
+ "loss": 1.4482,
+ "step": 849
+ },
+ {
+ "epoch": 0.8502391297552436,
+ "grad_norm": 0.8258544206619263,
+ "learning_rate": 0.00016286276973441333,
+ "loss": 1.7058,
+ "step": 850
+ },
+ {
+ "epoch": 0.8512394110843675,
+ "grad_norm": 0.6473391056060791,
+ "learning_rate": 0.00016278121246720987,
+ "loss": 1.5374,
+ "step": 851
+ },
+ {
+ "epoch": 0.8522396924134913,
+ "grad_norm": 0.7097072005271912,
+ "learning_rate": 0.00016269958621480788,
+ "loss": 1.6786,
+ "step": 852
+ },
+ {
+ "epoch": 0.8532399737426151,
+ "grad_norm": 0.724993884563446,
+ "learning_rate": 0.0001626178910668998,
+ "loss": 1.6022,
+ "step": 853
+ },
+ {
+ "epoch": 0.854240255071739,
+ "grad_norm": 0.6800474524497986,
+ "learning_rate": 0.00016253612711325386,
+ "loss": 1.6382,
+ "step": 854
+ },
+ {
+ "epoch": 0.8552405364008627,
+ "grad_norm": 0.6339759826660156,
+ "learning_rate": 0.0001624542944437139,
+ "loss": 1.5641,
+ "step": 855
+ },
+ {
+ "epoch": 0.8562408177299866,
+ "grad_norm": 0.6792349219322205,
+ "learning_rate": 0.00016237239314819917,
+ "loss": 1.3713,
+ "step": 856
+ },
+ {
+ "epoch": 0.8572410990591104,
+ "grad_norm": 0.6544696688652039,
+ "learning_rate": 0.0001622904233167044,
+ "loss": 1.5639,
+ "step": 857
+ },
+ {
+ "epoch": 0.8582413803882342,
+ "grad_norm": 0.7736073732376099,
+ "learning_rate": 0.0001622083850392996,
+ "loss": 1.5454,
+ "step": 858
+ },
+ {
+ "epoch": 0.859241661717358,
+ "grad_norm": 0.8642422556877136,
+ "learning_rate": 0.00016212627840613003,
+ "loss": 1.6852,
+ "step": 859
+ },
+ {
+ "epoch": 0.8602419430464818,
+ "grad_norm": 0.6520773768424988,
+ "learning_rate": 0.000162044103507416,
+ "loss": 1.5335,
+ "step": 860
+ },
+ {
+ "epoch": 0.8612422243756056,
+ "grad_norm": 0.7647336721420288,
+ "learning_rate": 0.00016196186043345288,
+ "loss": 1.5578,
+ "step": 861
+ },
+ {
+ "epoch": 0.8622425057047295,
+ "grad_norm": 0.9621163010597229,
+ "learning_rate": 0.00016187954927461093,
+ "loss": 1.6976,
+ "step": 862
+ },
+ {
+ "epoch": 0.8632427870338533,
+ "grad_norm": 0.6847056746482849,
+ "learning_rate": 0.00016179717012133521,
+ "loss": 1.7118,
+ "step": 863
+ },
+ {
+ "epoch": 0.864243068362977,
+ "grad_norm": 0.7482467889785767,
+ "learning_rate": 0.00016171472306414554,
+ "loss": 1.6601,
+ "step": 864
+ },
+ {
+ "epoch": 0.8652433496921009,
+ "grad_norm": 0.7760444283485413,
+ "learning_rate": 0.00016163220819363628,
+ "loss": 1.5587,
+ "step": 865
+ },
+ {
+ "epoch": 0.8662436310212247,
+ "grad_norm": 0.8380980491638184,
+ "learning_rate": 0.00016154962560047643,
+ "loss": 1.7171,
+ "step": 866
+ },
+ {
+ "epoch": 0.8672439123503486,
+ "grad_norm": 0.6927618384361267,
+ "learning_rate": 0.00016146697537540924,
+ "loss": 1.7244,
+ "step": 867
+ },
+ {
+ "epoch": 0.8682441936794724,
+ "grad_norm": 0.7855746746063232,
+ "learning_rate": 0.0001613842576092524,
+ "loss": 1.5848,
+ "step": 868
+ },
+ {
+ "epoch": 0.8692444750085961,
+ "grad_norm": 0.6743006110191345,
+ "learning_rate": 0.00016130147239289778,
+ "loss": 1.6969,
+ "step": 869
+ },
+ {
+ "epoch": 0.87024475633772,
+ "grad_norm": 0.7060980200767517,
+ "learning_rate": 0.00016121861981731135,
+ "loss": 1.5632,
+ "step": 870
+ },
+ {
+ "epoch": 0.8712450376668438,
+ "grad_norm": 0.7673144340515137,
+ "learning_rate": 0.00016113569997353312,
+ "loss": 1.5687,
+ "step": 871
+ },
+ {
+ "epoch": 0.8722453189959676,
+ "grad_norm": 0.8105847239494324,
+ "learning_rate": 0.000161052712952677,
+ "loss": 1.6074,
+ "step": 872
+ },
+ {
+ "epoch": 0.8732456003250915,
+ "grad_norm": 0.6536850333213806,
+ "learning_rate": 0.0001609696588459307,
+ "loss": 1.5842,
+ "step": 873
+ },
+ {
+ "epoch": 0.8742458816542152,
+ "grad_norm": 0.6653574705123901,
+ "learning_rate": 0.00016088653774455568,
+ "loss": 1.4652,
+ "step": 874
+ },
+ {
+ "epoch": 0.875246162983339,
+ "grad_norm": 0.7202721238136292,
+ "learning_rate": 0.00016080334973988695,
+ "loss": 1.5212,
+ "step": 875
+ },
+ {
+ "epoch": 0.8762464443124629,
+ "grad_norm": 0.8218807578086853,
+ "learning_rate": 0.00016072009492333318,
+ "loss": 1.803,
+ "step": 876
+ },
+ {
+ "epoch": 0.8772467256415867,
+ "grad_norm": 0.6170400381088257,
+ "learning_rate": 0.0001606367733863763,
+ "loss": 1.5313,
+ "step": 877
+ },
+ {
+ "epoch": 0.8782470069707106,
+ "grad_norm": 0.6750448346138,
+ "learning_rate": 0.00016055338522057158,
+ "loss": 1.6183,
+ "step": 878
+ },
+ {
+ "epoch": 0.8792472882998343,
+ "grad_norm": 0.6602128148078918,
+ "learning_rate": 0.00016046993051754756,
+ "loss": 1.6669,
+ "step": 879
+ },
+ {
+ "epoch": 0.8802475696289581,
+ "grad_norm": 0.7064031958580017,
+ "learning_rate": 0.00016038640936900586,
+ "loss": 1.7458,
+ "step": 880
+ },
+ {
+ "epoch": 0.881247850958082,
+ "grad_norm": 0.5916783809661865,
+ "learning_rate": 0.00016030282186672116,
+ "loss": 1.4966,
+ "step": 881
+ },
+ {
+ "epoch": 0.8822481322872058,
+ "grad_norm": 0.7189202904701233,
+ "learning_rate": 0.00016021916810254097,
+ "loss": 1.5812,
+ "step": 882
+ },
+ {
+ "epoch": 0.8832484136163296,
+ "grad_norm": 0.7760966420173645,
+ "learning_rate": 0.00016013544816838565,
+ "loss": 1.6709,
+ "step": 883
+ },
+ {
+ "epoch": 0.8842486949454534,
+ "grad_norm": 0.6894650459289551,
+ "learning_rate": 0.00016005166215624827,
+ "loss": 1.6255,
+ "step": 884
+ },
+ {
+ "epoch": 0.8852489762745772,
+ "grad_norm": 0.6777058839797974,
+ "learning_rate": 0.0001599678101581945,
+ "loss": 1.7479,
+ "step": 885
+ },
+ {
+ "epoch": 0.886249257603701,
+ "grad_norm": 0.7056024670600891,
+ "learning_rate": 0.00015988389226636253,
+ "loss": 1.7896,
+ "step": 886
+ },
+ {
+ "epoch": 0.8872495389328249,
+ "grad_norm": 0.6465604305267334,
+ "learning_rate": 0.00015979990857296295,
+ "loss": 1.7363,
+ "step": 887
+ },
+ {
+ "epoch": 0.8882498202619487,
+ "grad_norm": 0.6703017950057983,
+ "learning_rate": 0.00015971585917027862,
+ "loss": 1.6617,
+ "step": 888
+ },
+ {
+ "epoch": 0.8892501015910725,
+ "grad_norm": 0.7116142511367798,
+ "learning_rate": 0.00015963174415066468,
+ "loss": 1.8232,
+ "step": 889
+ },
+ {
+ "epoch": 0.8902503829201963,
+ "grad_norm": 0.7552229762077332,
+ "learning_rate": 0.0001595475636065483,
+ "loss": 1.7847,
+ "step": 890
+ },
+ {
+ "epoch": 0.8912506642493201,
+ "grad_norm": 0.70728999376297,
+ "learning_rate": 0.00015946331763042867,
+ "loss": 1.5665,
+ "step": 891
+ },
+ {
+ "epoch": 0.892250945578444,
+ "grad_norm": 0.6701356768608093,
+ "learning_rate": 0.00015937900631487686,
+ "loss": 1.3572,
+ "step": 892
+ },
+ {
+ "epoch": 0.8932512269075678,
+ "grad_norm": 0.6960388422012329,
+ "learning_rate": 0.00015929462975253585,
+ "loss": 1.5815,
+ "step": 893
+ },
+ {
+ "epoch": 0.8942515082366915,
+ "grad_norm": 0.6505674719810486,
+ "learning_rate": 0.00015921018803612014,
+ "loss": 1.7499,
+ "step": 894
+ },
+ {
+ "epoch": 0.8952517895658154,
+ "grad_norm": 0.604205310344696,
+ "learning_rate": 0.0001591256812584159,
+ "loss": 1.6838,
+ "step": 895
+ },
+ {
+ "epoch": 0.8962520708949392,
+ "grad_norm": 0.5875198841094971,
+ "learning_rate": 0.00015904110951228082,
+ "loss": 1.5147,
+ "step": 896
+ },
+ {
+ "epoch": 0.897252352224063,
+ "grad_norm": 0.6970433592796326,
+ "learning_rate": 0.00015895647289064396,
+ "loss": 1.7767,
+ "step": 897
+ },
+ {
+ "epoch": 0.8982526335531869,
+ "grad_norm": 0.7364515066146851,
+ "learning_rate": 0.00015887177148650564,
+ "loss": 1.6672,
+ "step": 898
+ },
+ {
+ "epoch": 0.8992529148823106,
+ "grad_norm": 0.7843589186668396,
+ "learning_rate": 0.0001587870053929374,
+ "loss": 1.689,
+ "step": 899
+ },
+ {
+ "epoch": 0.9002531962114345,
+ "grad_norm": 0.6405196189880371,
+ "learning_rate": 0.00015870217470308188,
+ "loss": 1.5917,
+ "step": 900
+ },
+ {
+ "epoch": 0.9012534775405583,
+ "grad_norm": 0.7019757628440857,
+ "learning_rate": 0.0001586172795101526,
+ "loss": 1.5497,
+ "step": 901
+ },
+ {
+ "epoch": 0.9022537588696821,
+ "grad_norm": 0.8048270344734192,
+ "learning_rate": 0.00015853231990743406,
+ "loss": 1.5821,
+ "step": 902
+ },
+ {
+ "epoch": 0.903254040198806,
+ "grad_norm": 0.6245777606964111,
+ "learning_rate": 0.0001584472959882815,
+ "loss": 1.5688,
+ "step": 903
+ },
+ {
+ "epoch": 0.9042543215279297,
+ "grad_norm": 0.6584132313728333,
+ "learning_rate": 0.00015836220784612085,
+ "loss": 1.4555,
+ "step": 904
+ },
+ {
+ "epoch": 0.9052546028570535,
+ "grad_norm": 0.7710773944854736,
+ "learning_rate": 0.00015827705557444852,
+ "loss": 1.6416,
+ "step": 905
+ },
+ {
+ "epoch": 0.9062548841861774,
+ "grad_norm": 0.6738126277923584,
+ "learning_rate": 0.00015819183926683153,
+ "loss": 1.6272,
+ "step": 906
+ },
+ {
+ "epoch": 0.9072551655153012,
+ "grad_norm": 0.6698735356330872,
+ "learning_rate": 0.00015810655901690715,
+ "loss": 1.4778,
+ "step": 907
+ },
+ {
+ "epoch": 0.9082554468444249,
+ "grad_norm": 1.0088928937911987,
+ "learning_rate": 0.00015802121491838297,
+ "loss": 1.6854,
+ "step": 908
+ },
+ {
+ "epoch": 0.9092557281735488,
+ "grad_norm": 0.6948708891868591,
+ "learning_rate": 0.0001579358070650367,
+ "loss": 1.5673,
+ "step": 909
+ },
+ {
+ "epoch": 0.9102560095026726,
+ "grad_norm": 0.6728948950767517,
+ "learning_rate": 0.00015785033555071616,
+ "loss": 1.6646,
+ "step": 910
+ },
+ {
+ "epoch": 0.9112562908317965,
+ "grad_norm": 0.8096952438354492,
+ "learning_rate": 0.00015776480046933905,
+ "loss": 1.4675,
+ "step": 911
+ },
+ {
+ "epoch": 0.9122565721609203,
+ "grad_norm": 0.6625403761863708,
+ "learning_rate": 0.000157679201914893,
+ "loss": 1.4793,
+ "step": 912
+ },
+ {
+ "epoch": 0.913256853490044,
+ "grad_norm": 0.7129424810409546,
+ "learning_rate": 0.00015759353998143528,
+ "loss": 1.574,
+ "step": 913
+ },
+ {
+ "epoch": 0.9142571348191679,
+ "grad_norm": 0.6151349544525146,
+ "learning_rate": 0.00015750781476309288,
+ "loss": 1.5631,
+ "step": 914
+ },
+ {
+ "epoch": 0.9152574161482917,
+ "grad_norm": 0.7185074687004089,
+ "learning_rate": 0.00015742202635406235,
+ "loss": 1.8382,
+ "step": 915
+ },
+ {
+ "epoch": 0.9162576974774155,
+ "grad_norm": 0.7076066732406616,
+ "learning_rate": 0.00015733617484860963,
+ "loss": 1.5394,
+ "step": 916
+ },
+ {
+ "epoch": 0.9172579788065394,
+ "grad_norm": 0.7286276817321777,
+ "learning_rate": 0.00015725026034106996,
+ "loss": 1.8139,
+ "step": 917
+ },
+ {
+ "epoch": 0.9182582601356631,
+ "grad_norm": 0.757075846195221,
+ "learning_rate": 0.00015716428292584787,
+ "loss": 1.6768,
+ "step": 918
+ },
+ {
+ "epoch": 0.919258541464787,
+ "grad_norm": 0.6926739811897278,
+ "learning_rate": 0.00015707824269741702,
+ "loss": 1.4541,
+ "step": 919
+ },
+ {
+ "epoch": 0.9202588227939108,
+ "grad_norm": 0.6489847898483276,
+ "learning_rate": 0.00015699213975031996,
+ "loss": 1.4725,
+ "step": 920
+ },
+ {
+ "epoch": 0.9212591041230346,
+ "grad_norm": 0.7668707966804504,
+ "learning_rate": 0.0001569059741791684,
+ "loss": 1.4239,
+ "step": 921
+ },
+ {
+ "epoch": 0.9222593854521585,
+ "grad_norm": 0.736863911151886,
+ "learning_rate": 0.0001568197460786426,
+ "loss": 1.6117,
+ "step": 922
+ },
+ {
+ "epoch": 0.9232596667812822,
+ "grad_norm": 0.8462884426116943,
+ "learning_rate": 0.0001567334555434917,
+ "loss": 1.5025,
+ "step": 923
+ },
+ {
+ "epoch": 0.924259948110406,
+ "grad_norm": 0.7481950521469116,
+ "learning_rate": 0.0001566471026685334,
+ "loss": 1.5024,
+ "step": 924
+ },
+ {
+ "epoch": 0.9252602294395299,
+ "grad_norm": 0.6457516551017761,
+ "learning_rate": 0.00015656068754865387,
+ "loss": 1.4526,
+ "step": 925
+ },
+ {
+ "epoch": 0.9262605107686537,
+ "grad_norm": 0.809140682220459,
+ "learning_rate": 0.00015647421027880772,
+ "loss": 1.4449,
+ "step": 926
+ },
+ {
+ "epoch": 0.9272607920977775,
+ "grad_norm": 0.6967790126800537,
+ "learning_rate": 0.0001563876709540178,
+ "loss": 1.5552,
+ "step": 927
+ },
+ {
+ "epoch": 0.9282610734269013,
+ "grad_norm": 0.6858595609664917,
+ "learning_rate": 0.0001563010696693752,
+ "loss": 1.6202,
+ "step": 928
+ },
+ {
+ "epoch": 0.9292613547560251,
+ "grad_norm": 0.7033559679985046,
+ "learning_rate": 0.00015621440652003907,
+ "loss": 1.7186,
+ "step": 929
+ },
+ {
+ "epoch": 0.930261636085149,
+ "grad_norm": 0.6527283787727356,
+ "learning_rate": 0.00015612768160123652,
+ "loss": 1.5028,
+ "step": 930
+ },
+ {
+ "epoch": 0.9312619174142728,
+ "grad_norm": 0.7243602275848389,
+ "learning_rate": 0.00015604089500826257,
+ "loss": 1.6729,
+ "step": 931
+ },
+ {
+ "epoch": 0.9322621987433966,
+ "grad_norm": 0.6734297275543213,
+ "learning_rate": 0.00015595404683648,
+ "loss": 1.4731,
+ "step": 932
+ },
+ {
+ "epoch": 0.9332624800725204,
+ "grad_norm": 0.7641247510910034,
+ "learning_rate": 0.00015586713718131922,
+ "loss": 1.5851,
+ "step": 933
+ },
+ {
+ "epoch": 0.9342627614016442,
+ "grad_norm": 0.7062788009643555,
+ "learning_rate": 0.0001557801661382782,
+ "loss": 1.5735,
+ "step": 934
+ },
+ {
+ "epoch": 0.935263042730768,
+ "grad_norm": 0.6413556337356567,
+ "learning_rate": 0.00015569313380292248,
+ "loss": 1.5854,
+ "step": 935
+ },
+ {
+ "epoch": 0.9362633240598919,
+ "grad_norm": 0.645720362663269,
+ "learning_rate": 0.00015560604027088477,
+ "loss": 1.5072,
+ "step": 936
+ },
+ {
+ "epoch": 0.9372636053890157,
+ "grad_norm": 0.6726225018501282,
+ "learning_rate": 0.00015551888563786515,
+ "loss": 1.587,
+ "step": 937
+ },
+ {
+ "epoch": 0.9382638867181394,
+ "grad_norm": 0.7043680548667908,
+ "learning_rate": 0.00015543166999963076,
+ "loss": 1.6577,
+ "step": 938
+ },
+ {
+ "epoch": 0.9392641680472633,
+ "grad_norm": 0.7049617767333984,
+ "learning_rate": 0.0001553443934520159,
+ "loss": 1.7624,
+ "step": 939
+ },
+ {
+ "epoch": 0.9402644493763871,
+ "grad_norm": 0.7060776352882385,
+ "learning_rate": 0.00015525705609092157,
+ "loss": 1.6208,
+ "step": 940
+ },
+ {
+ "epoch": 0.941264730705511,
+ "grad_norm": 0.6215025186538696,
+ "learning_rate": 0.00015516965801231586,
+ "loss": 1.4645,
+ "step": 941
+ },
+ {
+ "epoch": 0.9422650120346348,
+ "grad_norm": 0.7021099328994751,
+ "learning_rate": 0.0001550821993122334,
+ "loss": 1.566,
+ "step": 942
+ },
+ {
+ "epoch": 0.9432652933637585,
+ "grad_norm": 0.6451042294502258,
+ "learning_rate": 0.0001549946800867755,
+ "loss": 1.7491,
+ "step": 943
+ },
+ {
+ "epoch": 0.9442655746928824,
+ "grad_norm": 0.7288572192192078,
+ "learning_rate": 0.00015490710043210997,
+ "loss": 1.6302,
+ "step": 944
+ },
+ {
+ "epoch": 0.9452658560220062,
+ "grad_norm": 0.7850833535194397,
+ "learning_rate": 0.00015481946044447099,
+ "loss": 1.5673,
+ "step": 945
+ },
+ {
+ "epoch": 0.94626613735113,
+ "grad_norm": 0.7459181547164917,
+ "learning_rate": 0.00015473176022015906,
+ "loss": 1.4529,
+ "step": 946
+ },
+ {
+ "epoch": 0.9472664186802539,
+ "grad_norm": 0.7002627849578857,
+ "learning_rate": 0.0001546439998555409,
+ "loss": 1.8814,
+ "step": 947
+ },
+ {
+ "epoch": 0.9482667000093776,
+ "grad_norm": 0.6664572358131409,
+ "learning_rate": 0.0001545561794470492,
+ "loss": 1.5337,
+ "step": 948
+ },
+ {
+ "epoch": 0.9492669813385014,
+ "grad_norm": 0.757116973400116,
+ "learning_rate": 0.00015446829909118275,
+ "loss": 1.5775,
+ "step": 949
+ },
+ {
+ "epoch": 0.9502672626676253,
+ "grad_norm": 0.7456643581390381,
+ "learning_rate": 0.00015438035888450623,
+ "loss": 1.525,
+ "step": 950
+ },
+ {
+ "epoch": 0.9512675439967491,
+ "grad_norm": 0.6722500920295715,
+ "learning_rate": 0.00015429235892364994,
+ "loss": 1.5059,
+ "step": 951
+ },
+ {
+ "epoch": 0.952267825325873,
+ "grad_norm": 0.7431210279464722,
+ "learning_rate": 0.00015420429930530996,
+ "loss": 1.6867,
+ "step": 952
+ },
+ {
+ "epoch": 0.9532681066549967,
+ "grad_norm": 0.751015305519104,
+ "learning_rate": 0.00015411618012624786,
+ "loss": 1.7371,
+ "step": 953
+ },
+ {
+ "epoch": 0.9542683879841205,
+ "grad_norm": 0.807579517364502,
+ "learning_rate": 0.00015402800148329071,
+ "loss": 1.7353,
+ "step": 954
+ },
+ {
+ "epoch": 0.9552686693132444,
+ "grad_norm": 0.608161449432373,
+ "learning_rate": 0.00015393976347333088,
+ "loss": 1.3074,
+ "step": 955
+ },
+ {
+ "epoch": 0.9562689506423682,
+ "grad_norm": 0.7092815637588501,
+ "learning_rate": 0.00015385146619332596,
+ "loss": 1.676,
+ "step": 956
+ },
+ {
+ "epoch": 0.9572692319714919,
+ "grad_norm": 0.7639429569244385,
+ "learning_rate": 0.00015376310974029873,
+ "loss": 1.6452,
+ "step": 957
+ },
+ {
+ "epoch": 0.9582695133006158,
+ "grad_norm": 0.7333659529685974,
+ "learning_rate": 0.00015367469421133695,
+ "loss": 1.6821,
+ "step": 958
+ },
+ {
+ "epoch": 0.9592697946297396,
+ "grad_norm": 0.7246838212013245,
+ "learning_rate": 0.00015358621970359325,
+ "loss": 1.5078,
+ "step": 959
+ },
+ {
+ "epoch": 0.9602700759588634,
+ "grad_norm": 0.7209622859954834,
+ "learning_rate": 0.00015349768631428519,
+ "loss": 1.5617,
+ "step": 960
+ },
+ {
+ "epoch": 0.9612703572879873,
+ "grad_norm": 0.7034916877746582,
+ "learning_rate": 0.00015340909414069488,
+ "loss": 1.4711,
+ "step": 961
+ },
+ {
+ "epoch": 0.962270638617111,
+ "grad_norm": 0.7311360239982605,
+ "learning_rate": 0.00015332044328016914,
+ "loss": 1.6488,
+ "step": 962
+ },
+ {
+ "epoch": 0.9632709199462349,
+ "grad_norm": 0.6668992638587952,
+ "learning_rate": 0.0001532317338301192,
+ "loss": 1.6804,
+ "step": 963
+ },
+ {
+ "epoch": 0.9642712012753587,
+ "grad_norm": 0.6265329122543335,
+ "learning_rate": 0.00015314296588802076,
+ "loss": 1.8169,
+ "step": 964
+ },
+ {
+ "epoch": 0.9652714826044825,
+ "grad_norm": 0.6945448517799377,
+ "learning_rate": 0.00015305413955141365,
+ "loss": 1.8041,
+ "step": 965
+ },
+ {
+ "epoch": 0.9662717639336064,
+ "grad_norm": 0.6718643307685852,
+ "learning_rate": 0.00015296525491790205,
+ "loss": 1.3486,
+ "step": 966
+ },
+ {
+ "epoch": 0.9672720452627301,
+ "grad_norm": 0.6232700943946838,
+ "learning_rate": 0.00015287631208515406,
+ "loss": 1.5672,
+ "step": 967
+ },
+ {
+ "epoch": 0.9682723265918539,
+ "grad_norm": 0.7481172680854797,
+ "learning_rate": 0.00015278731115090171,
+ "loss": 1.5992,
+ "step": 968
+ },
+ {
+ "epoch": 0.9692726079209778,
+ "grad_norm": 0.6585466861724854,
+ "learning_rate": 0.00015269825221294098,
+ "loss": 1.6403,
+ "step": 969
+ },
+ {
+ "epoch": 0.9702728892501016,
+ "grad_norm": 0.7587956786155701,
+ "learning_rate": 0.00015260913536913154,
+ "loss": 1.7991,
+ "step": 970
+ },
+ {
+ "epoch": 0.9712731705792254,
+ "grad_norm": 0.672698974609375,
+ "learning_rate": 0.00015251996071739664,
+ "loss": 1.4311,
+ "step": 971
+ },
+ {
+ "epoch": 0.9722734519083492,
+ "grad_norm": 0.7597199082374573,
+ "learning_rate": 0.00015243072835572318,
+ "loss": 1.5692,
+ "step": 972
+ },
+ {
+ "epoch": 0.973273733237473,
+ "grad_norm": 0.7342745661735535,
+ "learning_rate": 0.0001523414383821613,
+ "loss": 1.6364,
+ "step": 973
+ },
+ {
+ "epoch": 0.9742740145665969,
+ "grad_norm": 0.6640815138816833,
+ "learning_rate": 0.00015225209089482462,
+ "loss": 1.5113,
+ "step": 974
+ },
+ {
+ "epoch": 0.9752742958957207,
+ "grad_norm": 0.6298378109931946,
+ "learning_rate": 0.0001521626859918898,
+ "loss": 1.4822,
+ "step": 975
+ },
+ {
+ "epoch": 0.9762745772248445,
+ "grad_norm": 0.6862055659294128,
+ "learning_rate": 0.00015207322377159668,
+ "loss": 1.6159,
+ "step": 976
+ },
+ {
+ "epoch": 0.9772748585539683,
+ "grad_norm": 0.6377236843109131,
+ "learning_rate": 0.00015198370433224805,
+ "loss": 1.6046,
+ "step": 977
+ },
+ {
+ "epoch": 0.9782751398830921,
+ "grad_norm": 0.620070219039917,
+ "learning_rate": 0.00015189412777220958,
+ "loss": 1.589,
+ "step": 978
+ },
+ {
+ "epoch": 0.9792754212122159,
+ "grad_norm": 0.7776119112968445,
+ "learning_rate": 0.00015180449418990976,
+ "loss": 1.485,
+ "step": 979
+ },
+ {
+ "epoch": 0.9802757025413398,
+ "grad_norm": 0.8258413076400757,
+ "learning_rate": 0.00015171480368383964,
+ "loss": 1.5615,
+ "step": 980
+ },
+ {
+ "epoch": 0.9812759838704636,
+ "grad_norm": 0.7297958135604858,
+ "learning_rate": 0.00015162505635255287,
+ "loss": 1.5408,
+ "step": 981
+ },
+ {
+ "epoch": 0.9822762651995874,
+ "grad_norm": 0.5848103165626526,
+ "learning_rate": 0.00015153525229466555,
+ "loss": 1.6821,
+ "step": 982
+ },
+ {
+ "epoch": 0.9832765465287112,
+ "grad_norm": 0.7375655174255371,
+ "learning_rate": 0.00015144539160885613,
+ "loss": 1.7568,
+ "step": 983
+ },
+ {
+ "epoch": 0.984276827857835,
+ "grad_norm": 0.7466885447502136,
+ "learning_rate": 0.00015135547439386516,
+ "loss": 1.5805,
+ "step": 984
+ },
+ {
+ "epoch": 0.9852771091869589,
+ "grad_norm": 0.6645593047142029,
+ "learning_rate": 0.0001512655007484955,
+ "loss": 1.6776,
+ "step": 985
+ },
+ {
+ "epoch": 0.9862773905160827,
+ "grad_norm": 0.7973874807357788,
+ "learning_rate": 0.00015117547077161185,
+ "loss": 1.4931,
+ "step": 986
+ },
+ {
+ "epoch": 0.9872776718452064,
+ "grad_norm": 0.685391902923584,
+ "learning_rate": 0.0001510853845621409,
+ "loss": 1.6254,
+ "step": 987
+ },
+ {
+ "epoch": 0.9882779531743303,
+ "grad_norm": 0.6562414765357971,
+ "learning_rate": 0.00015099524221907107,
+ "loss": 1.6677,
+ "step": 988
+ },
+ {
+ "epoch": 0.9892782345034541,
+ "grad_norm": 0.6216359734535217,
+ "learning_rate": 0.0001509050438414525,
+ "loss": 1.6107,
+ "step": 989
+ },
+ {
+ "epoch": 0.9902785158325779,
+ "grad_norm": 0.7108810544013977,
+ "learning_rate": 0.00015081478952839693,
+ "loss": 1.5268,
+ "step": 990
+ },
+ {
+ "epoch": 0.9912787971617018,
+ "grad_norm": 0.7076026797294617,
+ "learning_rate": 0.00015072447937907753,
+ "loss": 1.3716,
+ "step": 991
+ },
+ {
+ "epoch": 0.9922790784908255,
+ "grad_norm": 0.6056272983551025,
+ "learning_rate": 0.00015063411349272877,
+ "loss": 1.4931,
+ "step": 992
+ },
+ {
+ "epoch": 0.9932793598199494,
+ "grad_norm": 0.726671576499939,
+ "learning_rate": 0.00015054369196864644,
+ "loss": 1.6409,
+ "step": 993
+ },
+ {
+ "epoch": 0.9942796411490732,
+ "grad_norm": 0.7019214630126953,
+ "learning_rate": 0.00015045321490618748,
+ "loss": 1.4476,
+ "step": 994
+ },
+ {
+ "epoch": 0.995279922478197,
+ "grad_norm": 0.755043625831604,
+ "learning_rate": 0.00015036268240476978,
+ "loss": 1.6674,
+ "step": 995
+ },
+ {
+ "epoch": 0.9962802038073209,
+ "grad_norm": 0.7450313568115234,
+ "learning_rate": 0.00015027209456387218,
+ "loss": 1.3706,
+ "step": 996
+ },
+ {
+ "epoch": 0.9972804851364446,
+ "grad_norm": 0.6804680228233337,
+ "learning_rate": 0.00015018145148303438,
+ "loss": 1.3878,
+ "step": 997
+ },
+ {
+ "epoch": 0.9982807664655684,
+ "grad_norm": 0.7353954315185547,
+ "learning_rate": 0.00015009075326185667,
+ "loss": 1.8656,
+ "step": 998
+ },
+ {
+ "epoch": 0.9992810477946923,
+ "grad_norm": 0.7213340401649475,
+ "learning_rate": 0.00015000000000000001,
+ "loss": 1.6031,
+ "step": 999
+ },
+ {
+ "epoch": 1.0002813291238162,
+ "grad_norm": 0.7066403031349182,
+ "learning_rate": 0.00014990919179718584,
+ "loss": 1.3663,
+ "step": 1000
+ },
+ {
+ "epoch": 1.0012816104529398,
+ "grad_norm": 0.6104635000228882,
+ "learning_rate": 0.00014981832875319597,
+ "loss": 1.3155,
+ "step": 1001
+ },
+ {
+ "epoch": 1.0022818917820637,
+ "grad_norm": 0.7524546384811401,
+ "learning_rate": 0.00014972741096787242,
+ "loss": 1.2042,
+ "step": 1002
+ },
+ {
+ "epoch": 1.0032821731111876,
+ "grad_norm": 0.6831395626068115,
+ "learning_rate": 0.0001496364385411174,
+ "loss": 1.3909,
+ "step": 1003
+ },
+ {
+ "epoch": 1.0042824544403113,
+ "grad_norm": 0.6223152875900269,
+ "learning_rate": 0.0001495454115728932,
+ "loss": 1.2693,
+ "step": 1004
+ },
+ {
+ "epoch": 1.0052827357694352,
+ "grad_norm": 0.6630414128303528,
+ "learning_rate": 0.0001494543301632219,
+ "loss": 1.5871,
+ "step": 1005
+ },
+ {
+ "epoch": 1.006283017098559,
+ "grad_norm": 0.6211387515068054,
+ "learning_rate": 0.00014936319441218555,
+ "loss": 1.5096,
+ "step": 1006
+ },
+ {
+ "epoch": 1.0072832984276827,
+ "grad_norm": 0.7009375095367432,
+ "learning_rate": 0.0001492720044199259,
+ "loss": 1.4553,
+ "step": 1007
+ },
+ {
+ "epoch": 1.0082835797568066,
+ "grad_norm": 0.607667088508606,
+ "learning_rate": 0.0001491807602866442,
+ "loss": 1.4655,
+ "step": 1008
+ },
+ {
+ "epoch": 1.0092838610859305,
+ "grad_norm": 0.7168284058570862,
+ "learning_rate": 0.00014908946211260123,
+ "loss": 1.32,
+ "step": 1009
+ },
+ {
+ "epoch": 1.0102841424150542,
+ "grad_norm": 0.6472702622413635,
+ "learning_rate": 0.00014899810999811726,
+ "loss": 1.418,
+ "step": 1010
+ },
+ {
+ "epoch": 1.011284423744178,
+ "grad_norm": 0.6901958584785461,
+ "learning_rate": 0.0001489067040435717,
+ "loss": 1.5842,
+ "step": 1011
+ },
+ {
+ "epoch": 1.012284705073302,
+ "grad_norm": 0.6948314905166626,
+ "learning_rate": 0.00014881524434940313,
+ "loss": 1.3352,
+ "step": 1012
+ },
+ {
+ "epoch": 1.0132849864024256,
+ "grad_norm": 0.6064580082893372,
+ "learning_rate": 0.0001487237310161093,
+ "loss": 1.2467,
+ "step": 1013
+ },
+ {
+ "epoch": 1.0142852677315495,
+ "grad_norm": 0.5783251523971558,
+ "learning_rate": 0.0001486321641442467,
+ "loss": 1.3932,
+ "step": 1014
+ },
+ {
+ "epoch": 1.0152855490606734,
+ "grad_norm": 0.6915367245674133,
+ "learning_rate": 0.00014854054383443081,
+ "loss": 1.5062,
+ "step": 1015
+ },
+ {
+ "epoch": 1.016285830389797,
+ "grad_norm": 0.7143461108207703,
+ "learning_rate": 0.00014844887018733582,
+ "loss": 1.3284,
+ "step": 1016
+ },
+ {
+ "epoch": 1.017286111718921,
+ "grad_norm": 0.7030971050262451,
+ "learning_rate": 0.00014835714330369446,
+ "loss": 1.5919,
+ "step": 1017
+ },
+ {
+ "epoch": 1.0182863930480448,
+ "grad_norm": 0.7102513909339905,
+ "learning_rate": 0.00014826536328429795,
+ "loss": 1.4448,
+ "step": 1018
+ },
+ {
+ "epoch": 1.0192866743771685,
+ "grad_norm": 0.6152640581130981,
+ "learning_rate": 0.000148173530229996,
+ "loss": 1.4771,
+ "step": 1019
+ },
+ {
+ "epoch": 1.0202869557062924,
+ "grad_norm": 0.6302015781402588,
+ "learning_rate": 0.00014808164424169647,
+ "loss": 1.3969,
+ "step": 1020
+ },
+ {
+ "epoch": 1.0212872370354162,
+ "grad_norm": 0.8721572756767273,
+ "learning_rate": 0.0001479897054203655,
+ "loss": 1.3515,
+ "step": 1021
+ },
+ {
+ "epoch": 1.0222875183645401,
+ "grad_norm": 1.096592903137207,
+ "learning_rate": 0.00014789771386702717,
+ "loss": 1.4757,
+ "step": 1022
+ },
+ {
+ "epoch": 1.0232877996936638,
+ "grad_norm": 0.7684335112571716,
+ "learning_rate": 0.0001478056696827636,
+ "loss": 1.2521,
+ "step": 1023
+ },
+ {
+ "epoch": 1.0242880810227877,
+ "grad_norm": 0.6189197301864624,
+ "learning_rate": 0.0001477135729687147,
+ "loss": 1.4304,
+ "step": 1024
+ },
+ {
+ "epoch": 1.0252883623519116,
+ "grad_norm": 0.6061127781867981,
+ "learning_rate": 0.0001476214238260781,
+ "loss": 1.4236,
+ "step": 1025
+ },
+ {
+ "epoch": 1.0262886436810352,
+ "grad_norm": 0.5413788557052612,
+ "learning_rate": 0.000147529222356109,
+ "loss": 1.1392,
+ "step": 1026
+ },
+ {
+ "epoch": 1.0272889250101591,
+ "grad_norm": 0.6879326105117798,
+ "learning_rate": 0.0001474369686601202,
+ "loss": 1.3966,
+ "step": 1027
+ },
+ {
+ "epoch": 1.028289206339283,
+ "grad_norm": 0.817315936088562,
+ "learning_rate": 0.0001473446628394818,
+ "loss": 1.6747,
+ "step": 1028
+ },
+ {
+ "epoch": 1.0292894876684067,
+ "grad_norm": 0.7139183282852173,
+ "learning_rate": 0.00014725230499562119,
+ "loss": 1.5432,
+ "step": 1029
+ },
+ {
+ "epoch": 1.0302897689975306,
+ "grad_norm": 0.7536730766296387,
+ "learning_rate": 0.00014715989523002296,
+ "loss": 1.5839,
+ "step": 1030
+ },
+ {
+ "epoch": 1.0312900503266544,
+ "grad_norm": 0.7000136375427246,
+ "learning_rate": 0.00014706743364422878,
+ "loss": 1.3519,
+ "step": 1031
+ },
+ {
+ "epoch": 1.032290331655778,
+ "grad_norm": 0.6579506993293762,
+ "learning_rate": 0.00014697492033983707,
+ "loss": 1.3622,
+ "step": 1032
+ },
+ {
+ "epoch": 1.033290612984902,
+ "grad_norm": 0.6257238984107971,
+ "learning_rate": 0.00014688235541850337,
+ "loss": 1.4393,
+ "step": 1033
+ },
+ {
+ "epoch": 1.0342908943140259,
+ "grad_norm": 0.749273955821991,
+ "learning_rate": 0.0001467897389819397,
+ "loss": 1.5201,
+ "step": 1034
+ },
+ {
+ "epoch": 1.0352911756431495,
+ "grad_norm": 0.7008610963821411,
+ "learning_rate": 0.00014669707113191483,
+ "loss": 1.3041,
+ "step": 1035
+ },
+ {
+ "epoch": 1.0362914569722734,
+ "grad_norm": 0.6838043332099915,
+ "learning_rate": 0.0001466043519702539,
+ "loss": 1.435,
+ "step": 1036
+ },
+ {
+ "epoch": 1.0372917383013973,
+ "grad_norm": 0.6197534799575806,
+ "learning_rate": 0.00014651158159883855,
+ "loss": 1.3806,
+ "step": 1037
+ },
+ {
+ "epoch": 1.038292019630521,
+ "grad_norm": 0.6906173825263977,
+ "learning_rate": 0.0001464187601196066,
+ "loss": 1.3898,
+ "step": 1038
+ },
+ {
+ "epoch": 1.0392923009596449,
+ "grad_norm": 0.5627701282501221,
+ "learning_rate": 0.00014632588763455212,
+ "loss": 1.3949,
+ "step": 1039
+ },
+ {
+ "epoch": 1.0402925822887688,
+ "grad_norm": 0.6588866710662842,
+ "learning_rate": 0.00014623296424572517,
+ "loss": 1.4041,
+ "step": 1040
+ },
+ {
+ "epoch": 1.0412928636178926,
+ "grad_norm": 0.7941678762435913,
+ "learning_rate": 0.00014613999005523174,
+ "loss": 1.429,
+ "step": 1041
+ },
+ {
+ "epoch": 1.0422931449470163,
+ "grad_norm": 0.5834561586380005,
+ "learning_rate": 0.00014604696516523361,
+ "loss": 1.4007,
+ "step": 1042
+ },
+ {
+ "epoch": 1.0432934262761402,
+ "grad_norm": 0.5992164015769958,
+ "learning_rate": 0.00014595388967794835,
+ "loss": 1.4029,
+ "step": 1043
+ },
+ {
+ "epoch": 1.044293707605264,
+ "grad_norm": 0.6714745759963989,
+ "learning_rate": 0.00014586076369564908,
+ "loss": 1.4421,
+ "step": 1044
+ },
+ {
+ "epoch": 1.0452939889343877,
+ "grad_norm": 0.6675744652748108,
+ "learning_rate": 0.00014576758732066442,
+ "loss": 1.4663,
+ "step": 1045
+ },
+ {
+ "epoch": 1.0462942702635116,
+ "grad_norm": 0.6605483293533325,
+ "learning_rate": 0.00014567436065537835,
+ "loss": 1.3919,
+ "step": 1046
+ },
+ {
+ "epoch": 1.0472945515926355,
+ "grad_norm": 0.6836503744125366,
+ "learning_rate": 0.00014558108380223012,
+ "loss": 1.3428,
+ "step": 1047
+ },
+ {
+ "epoch": 1.0482948329217592,
+ "grad_norm": 0.6451092958450317,
+ "learning_rate": 0.00014548775686371412,
+ "loss": 1.3717,
+ "step": 1048
+ },
+ {
+ "epoch": 1.049295114250883,
+ "grad_norm": 0.6579246520996094,
+ "learning_rate": 0.00014539437994237977,
+ "loss": 1.7364,
+ "step": 1049
+ },
+ {
+ "epoch": 1.050295395580007,
+ "grad_norm": 0.625912070274353,
+ "learning_rate": 0.00014530095314083143,
+ "loss": 1.5574,
+ "step": 1050
+ },
+ {
+ "epoch": 1.0512956769091306,
+ "grad_norm": 0.7133544087409973,
+ "learning_rate": 0.00014520747656172824,
+ "loss": 1.6031,
+ "step": 1051
+ },
+ {
+ "epoch": 1.0522959582382545,
+ "grad_norm": 0.6956666111946106,
+ "learning_rate": 0.00014511395030778406,
+ "loss": 1.6075,
+ "step": 1052
+ },
+ {
+ "epoch": 1.0532962395673784,
+ "grad_norm": 0.7082141041755676,
+ "learning_rate": 0.00014502037448176734,
+ "loss": 1.3839,
+ "step": 1053
+ },
+ {
+ "epoch": 1.054296520896502,
+ "grad_norm": 0.696561872959137,
+ "learning_rate": 0.000144926749186501,
+ "loss": 1.6738,
+ "step": 1054
+ },
+ {
+ "epoch": 1.055296802225626,
+ "grad_norm": 0.6995558142662048,
+ "learning_rate": 0.00014483307452486227,
+ "loss": 1.4732,
+ "step": 1055
+ },
+ {
+ "epoch": 1.0562970835547498,
+ "grad_norm": 0.7434210181236267,
+ "learning_rate": 0.0001447393505997827,
+ "loss": 1.4207,
+ "step": 1056
+ },
+ {
+ "epoch": 1.0572973648838735,
+ "grad_norm": 0.6679419279098511,
+ "learning_rate": 0.00014464557751424793,
+ "loss": 1.397,
+ "step": 1057
+ },
+ {
+ "epoch": 1.0582976462129974,
+ "grad_norm": 0.6747702360153198,
+ "learning_rate": 0.00014455175537129758,
+ "loss": 1.5247,
+ "step": 1058
+ },
+ {
+ "epoch": 1.0592979275421213,
+ "grad_norm": 0.6184663772583008,
+ "learning_rate": 0.00014445788427402528,
+ "loss": 1.2086,
+ "step": 1059
+ },
+ {
+ "epoch": 1.0602982088712452,
+ "grad_norm": 0.6546644568443298,
+ "learning_rate": 0.00014436396432557835,
+ "loss": 1.3795,
+ "step": 1060
+ },
+ {
+ "epoch": 1.0612984902003688,
+ "grad_norm": 0.6418478488922119,
+ "learning_rate": 0.00014426999562915782,
+ "loss": 1.3997,
+ "step": 1061
+ },
+ {
+ "epoch": 1.0622987715294927,
+ "grad_norm": 0.6456977725028992,
+ "learning_rate": 0.00014417597828801832,
+ "loss": 1.347,
+ "step": 1062
+ },
+ {
+ "epoch": 1.0632990528586166,
+ "grad_norm": 0.7379586696624756,
+ "learning_rate": 0.0001440819124054679,
+ "loss": 1.4168,
+ "step": 1063
+ },
+ {
+ "epoch": 1.0642993341877403,
+ "grad_norm": 0.583483099937439,
+ "learning_rate": 0.00014398779808486793,
+ "loss": 1.3724,
+ "step": 1064
+ },
+ {
+ "epoch": 1.0652996155168641,
+ "grad_norm": 0.8881146311759949,
+ "learning_rate": 0.00014389363542963306,
+ "loss": 1.2834,
+ "step": 1065
+ },
+ {
+ "epoch": 1.066299896845988,
+ "grad_norm": 0.6458824276924133,
+ "learning_rate": 0.000143799424543231,
+ "loss": 1.2557,
+ "step": 1066
+ },
+ {
+ "epoch": 1.0673001781751117,
+ "grad_norm": 0.8149404525756836,
+ "learning_rate": 0.0001437051655291825,
+ "loss": 1.5179,
+ "step": 1067
+ },
+ {
+ "epoch": 1.0683004595042356,
+ "grad_norm": 0.8752502202987671,
+ "learning_rate": 0.0001436108584910611,
+ "loss": 1.3922,
+ "step": 1068
+ },
+ {
+ "epoch": 1.0693007408333595,
+ "grad_norm": 0.6741296648979187,
+ "learning_rate": 0.0001435165035324933,
+ "loss": 1.464,
+ "step": 1069
+ },
+ {
+ "epoch": 1.0703010221624831,
+ "grad_norm": 0.6555476784706116,
+ "learning_rate": 0.000143422100757158,
+ "loss": 1.3172,
+ "step": 1070
+ },
+ {
+ "epoch": 1.071301303491607,
+ "grad_norm": 0.660168468952179,
+ "learning_rate": 0.00014332765026878687,
+ "loss": 1.3089,
+ "step": 1071
+ },
+ {
+ "epoch": 1.072301584820731,
+ "grad_norm": 0.8213777542114258,
+ "learning_rate": 0.0001432331521711639,
+ "loss": 1.4487,
+ "step": 1072
+ },
+ {
+ "epoch": 1.0733018661498546,
+ "grad_norm": 0.6642137765884399,
+ "learning_rate": 0.00014313860656812536,
+ "loss": 1.1624,
+ "step": 1073
+ },
+ {
+ "epoch": 1.0743021474789785,
+ "grad_norm": 0.6304247975349426,
+ "learning_rate": 0.00014304401356355983,
+ "loss": 1.2725,
+ "step": 1074
+ },
+ {
+ "epoch": 1.0753024288081023,
+ "grad_norm": 0.6976219415664673,
+ "learning_rate": 0.00014294937326140788,
+ "loss": 1.6664,
+ "step": 1075
+ },
+ {
+ "epoch": 1.076302710137226,
+ "grad_norm": 0.6528605818748474,
+ "learning_rate": 0.00014285468576566207,
+ "loss": 1.3489,
+ "step": 1076
+ },
+ {
+ "epoch": 1.07730299146635,
+ "grad_norm": 0.7203120589256287,
+ "learning_rate": 0.00014275995118036693,
+ "loss": 1.4319,
+ "step": 1077
+ },
+ {
+ "epoch": 1.0783032727954738,
+ "grad_norm": 0.8259358406066895,
+ "learning_rate": 0.00014266516960961852,
+ "loss": 1.3707,
+ "step": 1078
+ },
+ {
+ "epoch": 1.0793035541245974,
+ "grad_norm": 0.9485010504722595,
+ "learning_rate": 0.00014257034115756472,
+ "loss": 1.6787,
+ "step": 1079
+ },
+ {
+ "epoch": 1.0803038354537213,
+ "grad_norm": 0.6732786893844604,
+ "learning_rate": 0.0001424754659284048,
+ "loss": 1.4184,
+ "step": 1080
+ },
+ {
+ "epoch": 1.0813041167828452,
+ "grad_norm": 0.6673377752304077,
+ "learning_rate": 0.0001423805440263895,
+ "loss": 1.5084,
+ "step": 1081
+ },
+ {
+ "epoch": 1.0823043981119689,
+ "grad_norm": 0.6682411432266235,
+ "learning_rate": 0.0001422855755558208,
+ "loss": 1.4034,
+ "step": 1082
+ },
+ {
+ "epoch": 1.0833046794410928,
+ "grad_norm": 0.6940018534660339,
+ "learning_rate": 0.00014219056062105193,
+ "loss": 1.6816,
+ "step": 1083
+ },
+ {
+ "epoch": 1.0843049607702167,
+ "grad_norm": 0.7052391767501831,
+ "learning_rate": 0.0001420954993264871,
+ "loss": 1.5849,
+ "step": 1084
+ },
+ {
+ "epoch": 1.0853052420993405,
+ "grad_norm": 0.7090102434158325,
+ "learning_rate": 0.00014200039177658145,
+ "loss": 1.2906,
+ "step": 1085
+ },
+ {
+ "epoch": 1.0863055234284642,
+ "grad_norm": 0.7664905190467834,
+ "learning_rate": 0.000141905238075841,
+ "loss": 1.5504,
+ "step": 1086
+ },
+ {
+ "epoch": 1.087305804757588,
+ "grad_norm": 0.6557911038398743,
+ "learning_rate": 0.00014181003832882248,
+ "loss": 1.5846,
+ "step": 1087
+ },
+ {
+ "epoch": 1.088306086086712,
+ "grad_norm": 0.6342834234237671,
+ "learning_rate": 0.00014171479264013311,
+ "loss": 1.2405,
+ "step": 1088
+ },
+ {
+ "epoch": 1.0893063674158356,
+ "grad_norm": 0.7152488827705383,
+ "learning_rate": 0.00014161950111443077,
+ "loss": 1.5047,
+ "step": 1089
+ },
+ {
+ "epoch": 1.0903066487449595,
+ "grad_norm": 0.6031161546707153,
+ "learning_rate": 0.00014152416385642357,
+ "loss": 1.3203,
+ "step": 1090
+ },
+ {
+ "epoch": 1.0913069300740834,
+ "grad_norm": 0.6475042700767517,
+ "learning_rate": 0.00014142878097086995,
+ "loss": 1.191,
+ "step": 1091
+ },
+ {
+ "epoch": 1.092307211403207,
+ "grad_norm": 0.7956790924072266,
+ "learning_rate": 0.0001413333525625784,
+ "loss": 1.3601,
+ "step": 1092
+ },
+ {
+ "epoch": 1.093307492732331,
+ "grad_norm": 0.6703265309333801,
+ "learning_rate": 0.00014123787873640754,
+ "loss": 1.374,
+ "step": 1093
+ },
+ {
+ "epoch": 1.0943077740614549,
+ "grad_norm": 0.7583750486373901,
+ "learning_rate": 0.00014114235959726575,
+ "loss": 1.3064,
+ "step": 1094
+ },
+ {
+ "epoch": 1.0953080553905785,
+ "grad_norm": 0.6749271154403687,
+ "learning_rate": 0.0001410467952501114,
+ "loss": 1.4501,
+ "step": 1095
+ },
+ {
+ "epoch": 1.0963083367197024,
+ "grad_norm": 0.6708521842956543,
+ "learning_rate": 0.00014095118579995235,
+ "loss": 1.5046,
+ "step": 1096
+ },
+ {
+ "epoch": 1.0973086180488263,
+ "grad_norm": 0.5871726870536804,
+ "learning_rate": 0.0001408555313518461,
+ "loss": 1.3549,
+ "step": 1097
+ },
+ {
+ "epoch": 1.09830889937795,
+ "grad_norm": 0.6886669397354126,
+ "learning_rate": 0.00014075983201089964,
+ "loss": 1.3131,
+ "step": 1098
+ },
+ {
+ "epoch": 1.0993091807070738,
+ "grad_norm": 0.6243886351585388,
+ "learning_rate": 0.0001406640878822692,
+ "loss": 1.2278,
+ "step": 1099
+ },
+ {
+ "epoch": 1.1003094620361977,
+ "grad_norm": 0.7198624610900879,
+ "learning_rate": 0.00014056829907116024,
+ "loss": 1.4459,
+ "step": 1100
+ },
+ {
+ "epoch": 1.1013097433653214,
+ "grad_norm": 0.8059262037277222,
+ "learning_rate": 0.00014047246568282736,
+ "loss": 1.473,
+ "step": 1101
+ },
+ {
+ "epoch": 1.1023100246944453,
+ "grad_norm": 0.8409417271614075,
+ "learning_rate": 0.00014037658782257414,
+ "loss": 1.6688,
+ "step": 1102
+ },
+ {
+ "epoch": 1.1033103060235692,
+ "grad_norm": 0.739276111125946,
+ "learning_rate": 0.00014028066559575302,
+ "loss": 1.6182,
+ "step": 1103
+ },
+ {
+ "epoch": 1.104310587352693,
+ "grad_norm": 0.6282714009284973,
+ "learning_rate": 0.00014018469910776513,
+ "loss": 1.2835,
+ "step": 1104
+ },
+ {
+ "epoch": 1.1053108686818167,
+ "grad_norm": 0.7133497595787048,
+ "learning_rate": 0.0001400886884640603,
+ "loss": 1.5798,
+ "step": 1105
+ },
+ {
+ "epoch": 1.1063111500109406,
+ "grad_norm": 0.6376346945762634,
+ "learning_rate": 0.00013999263377013693,
+ "loss": 1.1436,
+ "step": 1106
+ },
+ {
+ "epoch": 1.1073114313400645,
+ "grad_norm": 0.5934734344482422,
+ "learning_rate": 0.00013989653513154165,
+ "loss": 1.3204,
+ "step": 1107
+ },
+ {
+ "epoch": 1.1083117126691882,
+ "grad_norm": 0.6655352115631104,
+ "learning_rate": 0.00013980039265386955,
+ "loss": 1.4602,
+ "step": 1108
+ },
+ {
+ "epoch": 1.109311993998312,
+ "grad_norm": 0.7147901058197021,
+ "learning_rate": 0.00013970420644276383,
+ "loss": 1.4124,
+ "step": 1109
+ },
+ {
+ "epoch": 1.110312275327436,
+ "grad_norm": 0.7845139503479004,
+ "learning_rate": 0.0001396079766039157,
+ "loss": 1.5831,
+ "step": 1110
+ },
+ {
+ "epoch": 1.1113125566565596,
+ "grad_norm": 0.8100587129592896,
+ "learning_rate": 0.00013951170324306435,
+ "loss": 1.6218,
+ "step": 1111
+ },
+ {
+ "epoch": 1.1123128379856835,
+ "grad_norm": 0.689988374710083,
+ "learning_rate": 0.00013941538646599687,
+ "loss": 1.2396,
+ "step": 1112
+ },
+ {
+ "epoch": 1.1133131193148074,
+ "grad_norm": 0.6771540641784668,
+ "learning_rate": 0.0001393190263785479,
+ "loss": 1.3739,
+ "step": 1113
+ },
+ {
+ "epoch": 1.114313400643931,
+ "grad_norm": 0.6424306631088257,
+ "learning_rate": 0.0001392226230865998,
+ "loss": 1.1653,
+ "step": 1114
+ },
+ {
+ "epoch": 1.115313681973055,
+ "grad_norm": 0.6135202646255493,
+ "learning_rate": 0.0001391261766960823,
+ "loss": 1.1924,
+ "step": 1115
+ },
+ {
+ "epoch": 1.1163139633021788,
+ "grad_norm": 0.6751917004585266,
+ "learning_rate": 0.00013902968731297255,
+ "loss": 1.4491,
+ "step": 1116
+ },
+ {
+ "epoch": 1.1173142446313025,
+ "grad_norm": 0.875303328037262,
+ "learning_rate": 0.00013893315504329498,
+ "loss": 1.3918,
+ "step": 1117
+ },
+ {
+ "epoch": 1.1183145259604264,
+ "grad_norm": 0.7102020978927612,
+ "learning_rate": 0.00013883657999312109,
+ "loss": 1.463,
+ "step": 1118
+ },
+ {
+ "epoch": 1.1193148072895502,
+ "grad_norm": 0.6863378882408142,
+ "learning_rate": 0.00013873996226856933,
+ "loss": 1.3958,
+ "step": 1119
+ },
+ {
+ "epoch": 1.120315088618674,
+ "grad_norm": 0.6769587397575378,
+ "learning_rate": 0.00013864330197580513,
+ "loss": 1.3044,
+ "step": 1120
+ },
+ {
+ "epoch": 1.1213153699477978,
+ "grad_norm": 0.7217769026756287,
+ "learning_rate": 0.0001385465992210407,
+ "loss": 1.6125,
+ "step": 1121
+ },
+ {
+ "epoch": 1.1223156512769217,
+ "grad_norm": 0.6756213903427124,
+ "learning_rate": 0.00013844985411053492,
+ "loss": 1.3658,
+ "step": 1122
+ },
+ {
+ "epoch": 1.1233159326060456,
+ "grad_norm": 0.7109145522117615,
+ "learning_rate": 0.00013835306675059308,
+ "loss": 1.5698,
+ "step": 1123
+ },
+ {
+ "epoch": 1.1243162139351692,
+ "grad_norm": 0.5903546810150146,
+ "learning_rate": 0.00013825623724756704,
+ "loss": 1.4429,
+ "step": 1124
+ },
+ {
+ "epoch": 1.1253164952642931,
+ "grad_norm": 0.7500163912773132,
+ "learning_rate": 0.00013815936570785487,
+ "loss": 1.2482,
+ "step": 1125
+ },
+ {
+ "epoch": 1.1263167765934168,
+ "grad_norm": 0.6458998918533325,
+ "learning_rate": 0.00013806245223790088,
+ "loss": 1.3496,
+ "step": 1126
+ },
+ {
+ "epoch": 1.1273170579225407,
+ "grad_norm": 0.627657413482666,
+ "learning_rate": 0.0001379654969441955,
+ "loss": 1.4847,
+ "step": 1127
+ },
+ {
+ "epoch": 1.1283173392516646,
+ "grad_norm": 0.7440046072006226,
+ "learning_rate": 0.000137868499933275,
+ "loss": 1.782,
+ "step": 1128
+ },
+ {
+ "epoch": 1.1293176205807884,
+ "grad_norm": 0.6717308163642883,
+ "learning_rate": 0.00013777146131172162,
+ "loss": 1.6345,
+ "step": 1129
+ },
+ {
+ "epoch": 1.130317901909912,
+ "grad_norm": 0.6480956673622131,
+ "learning_rate": 0.00013767438118616318,
+ "loss": 1.2862,
+ "step": 1130
+ },
+ {
+ "epoch": 1.131318183239036,
+ "grad_norm": 0.6778338551521301,
+ "learning_rate": 0.00013757725966327322,
+ "loss": 1.4821,
+ "step": 1131
+ },
+ {
+ "epoch": 1.1323184645681599,
+ "grad_norm": 0.6759636402130127,
+ "learning_rate": 0.00013748009684977073,
+ "loss": 1.5988,
+ "step": 1132
+ },
+ {
+ "epoch": 1.1333187458972835,
+ "grad_norm": 0.674404501914978,
+ "learning_rate": 0.0001373828928524201,
+ "loss": 1.4744,
+ "step": 1133
+ },
+ {
+ "epoch": 1.1343190272264074,
+ "grad_norm": 0.6017488241195679,
+ "learning_rate": 0.00013728564777803088,
+ "loss": 1.6296,
+ "step": 1134
+ },
+ {
+ "epoch": 1.1353193085555313,
+ "grad_norm": 0.6459933519363403,
+ "learning_rate": 0.00013718836173345783,
+ "loss": 1.5347,
+ "step": 1135
+ },
+ {
+ "epoch": 1.136319589884655,
+ "grad_norm": 0.6578681468963623,
+ "learning_rate": 0.00013709103482560078,
+ "loss": 1.4101,
+ "step": 1136
+ },
+ {
+ "epoch": 1.1373198712137789,
+ "grad_norm": 0.5906695127487183,
+ "learning_rate": 0.00013699366716140435,
+ "loss": 1.422,
+ "step": 1137
+ },
+ {
+ "epoch": 1.1383201525429028,
+ "grad_norm": 0.5622004866600037,
+ "learning_rate": 0.00013689625884785798,
+ "loss": 1.2805,
+ "step": 1138
+ },
+ {
+ "epoch": 1.1393204338720264,
+ "grad_norm": 0.7057269811630249,
+ "learning_rate": 0.00013679880999199583,
+ "loss": 1.5009,
+ "step": 1139
+ },
+ {
+ "epoch": 1.1403207152011503,
+ "grad_norm": 0.6655155420303345,
+ "learning_rate": 0.00013670132070089653,
+ "loss": 1.3504,
+ "step": 1140
+ },
+ {
+ "epoch": 1.1413209965302742,
+ "grad_norm": 0.6448667645454407,
+ "learning_rate": 0.00013660379108168324,
+ "loss": 1.4345,
+ "step": 1141
+ },
+ {
+ "epoch": 1.142321277859398,
+ "grad_norm": 0.6696295142173767,
+ "learning_rate": 0.00013650622124152334,
+ "loss": 1.3248,
+ "step": 1142
+ },
+ {
+ "epoch": 1.1433215591885217,
+ "grad_norm": 0.8913035988807678,
+ "learning_rate": 0.0001364086112876284,
+ "loss": 1.3148,
+ "step": 1143
+ },
+ {
+ "epoch": 1.1443218405176456,
+ "grad_norm": 0.6853944659233093,
+ "learning_rate": 0.00013631096132725413,
+ "loss": 1.4361,
+ "step": 1144
+ },
+ {
+ "epoch": 1.1453221218467693,
+ "grad_norm": 0.6286287307739258,
+ "learning_rate": 0.00013621327146770025,
+ "loss": 1.4485,
+ "step": 1145
+ },
+ {
+ "epoch": 1.1463224031758932,
+ "grad_norm": 0.6847277283668518,
+ "learning_rate": 0.00013611554181631013,
+ "loss": 1.4095,
+ "step": 1146
+ },
+ {
+ "epoch": 1.147322684505017,
+ "grad_norm": 0.6514857411384583,
+ "learning_rate": 0.00013601777248047105,
+ "loss": 1.4106,
+ "step": 1147
+ },
+ {
+ "epoch": 1.148322965834141,
+ "grad_norm": 0.6113057732582092,
+ "learning_rate": 0.0001359199635676138,
+ "loss": 1.3483,
+ "step": 1148
+ },
+ {
+ "epoch": 1.1493232471632646,
+ "grad_norm": 0.6366062164306641,
+ "learning_rate": 0.00013582211518521273,
+ "loss": 1.4164,
+ "step": 1149
+ },
+ {
+ "epoch": 1.1503235284923885,
+ "grad_norm": 0.6680336594581604,
+ "learning_rate": 0.00013572422744078551,
+ "loss": 1.5326,
+ "step": 1150
+ },
+ {
+ "epoch": 1.1513238098215124,
+ "grad_norm": 0.6046566367149353,
+ "learning_rate": 0.00013562630044189304,
+ "loss": 1.4139,
+ "step": 1151
+ },
+ {
+ "epoch": 1.152324091150636,
+ "grad_norm": 0.5869380235671997,
+ "learning_rate": 0.00013552833429613938,
+ "loss": 1.2859,
+ "step": 1152
+ },
+ {
+ "epoch": 1.15332437247976,
+ "grad_norm": 0.6923080682754517,
+ "learning_rate": 0.0001354303291111716,
+ "loss": 1.3419,
+ "step": 1153
+ },
+ {
+ "epoch": 1.1543246538088838,
+ "grad_norm": 0.698279082775116,
+ "learning_rate": 0.0001353322849946797,
+ "loss": 1.4796,
+ "step": 1154
+ },
+ {
+ "epoch": 1.1553249351380075,
+ "grad_norm": 0.6980450749397278,
+ "learning_rate": 0.00013523420205439646,
+ "loss": 1.5941,
+ "step": 1155
+ },
+ {
+ "epoch": 1.1563252164671314,
+ "grad_norm": 0.7222338914871216,
+ "learning_rate": 0.0001351360803980972,
+ "loss": 1.5019,
+ "step": 1156
+ },
+ {
+ "epoch": 1.1573254977962553,
+ "grad_norm": 0.6446929574012756,
+ "learning_rate": 0.00013503792013359997,
+ "loss": 1.2706,
+ "step": 1157
+ },
+ {
+ "epoch": 1.158325779125379,
+ "grad_norm": 0.699488639831543,
+ "learning_rate": 0.00013493972136876509,
+ "loss": 1.5775,
+ "step": 1158
+ },
+ {
+ "epoch": 1.1593260604545028,
+ "grad_norm": 0.6865110397338867,
+ "learning_rate": 0.00013484148421149527,
+ "loss": 1.5015,
+ "step": 1159
+ },
+ {
+ "epoch": 1.1603263417836267,
+ "grad_norm": 0.800570547580719,
+ "learning_rate": 0.0001347432087697354,
+ "loss": 1.401,
+ "step": 1160
+ },
+ {
+ "epoch": 1.1613266231127504,
+ "grad_norm": 0.706388533115387,
+ "learning_rate": 0.00013464489515147238,
+ "loss": 1.2895,
+ "step": 1161
+ },
+ {
+ "epoch": 1.1623269044418743,
+ "grad_norm": 0.7967466711997986,
+ "learning_rate": 0.0001345465434647351,
+ "loss": 1.848,
+ "step": 1162
+ },
+ {
+ "epoch": 1.1633271857709981,
+ "grad_norm": 0.7130827903747559,
+ "learning_rate": 0.00013444815381759425,
+ "loss": 1.2899,
+ "step": 1163
+ },
+ {
+ "epoch": 1.1643274671001218,
+ "grad_norm": 0.6437693238258362,
+ "learning_rate": 0.00013434972631816235,
+ "loss": 1.4456,
+ "step": 1164
+ },
+ {
+ "epoch": 1.1653277484292457,
+ "grad_norm": 0.6305271983146667,
+ "learning_rate": 0.0001342512610745933,
+ "loss": 1.3375,
+ "step": 1165
+ },
+ {
+ "epoch": 1.1663280297583696,
+ "grad_norm": 0.6622384786605835,
+ "learning_rate": 0.0001341527581950827,
+ "loss": 1.6379,
+ "step": 1166
+ },
+ {
+ "epoch": 1.1673283110874935,
+ "grad_norm": 0.64511638879776,
+ "learning_rate": 0.00013405421778786737,
+ "loss": 1.2818,
+ "step": 1167
+ },
+ {
+ "epoch": 1.1683285924166171,
+ "grad_norm": 0.6575477719306946,
+ "learning_rate": 0.00013395563996122537,
+ "loss": 1.2264,
+ "step": 1168
+ },
+ {
+ "epoch": 1.169328873745741,
+ "grad_norm": 0.787896990776062,
+ "learning_rate": 0.00013385702482347593,
+ "loss": 1.6519,
+ "step": 1169
+ },
+ {
+ "epoch": 1.170329155074865,
+ "grad_norm": 0.7512592077255249,
+ "learning_rate": 0.00013375837248297926,
+ "loss": 1.4776,
+ "step": 1170
+ },
+ {
+ "epoch": 1.1713294364039886,
+ "grad_norm": 0.9541054964065552,
+ "learning_rate": 0.0001336596830481364,
+ "loss": 1.5383,
+ "step": 1171
+ },
+ {
+ "epoch": 1.1723297177331125,
+ "grad_norm": 0.7181218266487122,
+ "learning_rate": 0.0001335609566273892,
+ "loss": 1.4088,
+ "step": 1172
+ },
+ {
+ "epoch": 1.1733299990622363,
+ "grad_norm": 0.7449761629104614,
+ "learning_rate": 0.00013346219332922016,
+ "loss": 1.2313,
+ "step": 1173
+ },
+ {
+ "epoch": 1.17433028039136,
+ "grad_norm": 0.7783718705177307,
+ "learning_rate": 0.00013336339326215228,
+ "loss": 1.4114,
+ "step": 1174
+ },
+ {
+ "epoch": 1.175330561720484,
+ "grad_norm": 0.8479213714599609,
+ "learning_rate": 0.00013326455653474897,
+ "loss": 1.6336,
+ "step": 1175
+ },
+ {
+ "epoch": 1.1763308430496078,
+ "grad_norm": 0.7903116941452026,
+ "learning_rate": 0.00013316568325561393,
+ "loss": 1.3251,
+ "step": 1176
+ },
+ {
+ "epoch": 1.1773311243787314,
+ "grad_norm": 0.7016749978065491,
+ "learning_rate": 0.00013306677353339098,
+ "loss": 1.5013,
+ "step": 1177
+ },
+ {
+ "epoch": 1.1783314057078553,
+ "grad_norm": 0.6183115839958191,
+ "learning_rate": 0.000132967827476764,
+ "loss": 1.3984,
+ "step": 1178
+ },
+ {
+ "epoch": 1.1793316870369792,
+ "grad_norm": 0.6227801442146301,
+ "learning_rate": 0.0001328688451944569,
+ "loss": 1.2306,
+ "step": 1179
+ },
+ {
+ "epoch": 1.1803319683661029,
+ "grad_norm": 0.7611119151115417,
+ "learning_rate": 0.00013276982679523322,
+ "loss": 1.3648,
+ "step": 1180
+ },
+ {
+ "epoch": 1.1813322496952268,
+ "grad_norm": 0.6929368376731873,
+ "learning_rate": 0.00013267077238789633,
+ "loss": 1.5107,
+ "step": 1181
+ },
+ {
+ "epoch": 1.1823325310243507,
+ "grad_norm": 0.6637099385261536,
+ "learning_rate": 0.00013257168208128908,
+ "loss": 1.5103,
+ "step": 1182
+ },
+ {
+ "epoch": 1.1833328123534743,
+ "grad_norm": 0.6320933103561401,
+ "learning_rate": 0.00013247255598429378,
+ "loss": 1.4024,
+ "step": 1183
+ },
+ {
+ "epoch": 1.1843330936825982,
+ "grad_norm": 0.8260888457298279,
+ "learning_rate": 0.00013237339420583212,
+ "loss": 1.4385,
+ "step": 1184
+ },
+ {
+ "epoch": 1.185333375011722,
+ "grad_norm": 0.7872930765151978,
+ "learning_rate": 0.00013227419685486492,
+ "loss": 1.2566,
+ "step": 1185
+ },
+ {
+ "epoch": 1.186333656340846,
+ "grad_norm": 0.6857215762138367,
+ "learning_rate": 0.00013217496404039218,
+ "loss": 1.619,
+ "step": 1186
+ },
+ {
+ "epoch": 1.1873339376699696,
+ "grad_norm": 0.708858072757721,
+ "learning_rate": 0.0001320756958714528,
+ "loss": 1.2228,
+ "step": 1187
+ },
+ {
+ "epoch": 1.1883342189990935,
+ "grad_norm": 0.6442694067955017,
+ "learning_rate": 0.00013197639245712454,
+ "loss": 1.5542,
+ "step": 1188
+ },
+ {
+ "epoch": 1.1893345003282172,
+ "grad_norm": 0.5615749359130859,
+ "learning_rate": 0.00013187705390652388,
+ "loss": 1.5289,
+ "step": 1189
+ },
+ {
+ "epoch": 1.190334781657341,
+ "grad_norm": 0.6250069737434387,
+ "learning_rate": 0.00013177768032880593,
+ "loss": 1.3849,
+ "step": 1190
+ },
+ {
+ "epoch": 1.191335062986465,
+ "grad_norm": 0.6521658301353455,
+ "learning_rate": 0.0001316782718331643,
+ "loss": 1.4118,
+ "step": 1191
+ },
+ {
+ "epoch": 1.1923353443155889,
+ "grad_norm": 0.7188206315040588,
+ "learning_rate": 0.0001315788285288309,
+ "loss": 1.4436,
+ "step": 1192
+ },
+ {
+ "epoch": 1.1933356256447125,
+ "grad_norm": 0.7098423838615417,
+ "learning_rate": 0.00013147935052507597,
+ "loss": 1.3339,
+ "step": 1193
+ },
+ {
+ "epoch": 1.1943359069738364,
+ "grad_norm": 0.655750036239624,
+ "learning_rate": 0.00013137983793120786,
+ "loss": 1.4208,
+ "step": 1194
+ },
+ {
+ "epoch": 1.1953361883029603,
+ "grad_norm": 0.640650749206543,
+ "learning_rate": 0.0001312802908565729,
+ "loss": 1.7209,
+ "step": 1195
+ },
+ {
+ "epoch": 1.196336469632084,
+ "grad_norm": 0.601091206073761,
+ "learning_rate": 0.0001311807094105553,
+ "loss": 1.4339,
+ "step": 1196
+ },
+ {
+ "epoch": 1.1973367509612078,
+ "grad_norm": 0.5894292593002319,
+ "learning_rate": 0.00013108109370257712,
+ "loss": 1.4687,
+ "step": 1197
+ },
+ {
+ "epoch": 1.1983370322903317,
+ "grad_norm": 0.7012053728103638,
+ "learning_rate": 0.00013098144384209796,
+ "loss": 1.5834,
+ "step": 1198
+ },
+ {
+ "epoch": 1.1993373136194554,
+ "grad_norm": 0.636356770992279,
+ "learning_rate": 0.000130881759938615,
+ "loss": 1.1723,
+ "step": 1199
+ },
+ {
+ "epoch": 1.2003375949485793,
+ "grad_norm": 0.6913763284683228,
+ "learning_rate": 0.00013078204210166278,
+ "loss": 1.3327,
+ "step": 1200
+ },
+ {
+ "epoch": 1.2013378762777032,
+ "grad_norm": 0.7067025303840637,
+ "learning_rate": 0.00013068229044081324,
+ "loss": 1.4671,
+ "step": 1201
+ },
+ {
+ "epoch": 1.2023381576068268,
+ "grad_norm": 0.698302149772644,
+ "learning_rate": 0.0001305825050656754,
+ "loss": 1.601,
+ "step": 1202
+ },
+ {
+ "epoch": 1.2033384389359507,
+ "grad_norm": 0.6283687949180603,
+ "learning_rate": 0.00013048268608589533,
+ "loss": 1.4164,
+ "step": 1203
+ },
+ {
+ "epoch": 1.2043387202650746,
+ "grad_norm": 0.6440062522888184,
+ "learning_rate": 0.00013038283361115603,
+ "loss": 1.3725,
+ "step": 1204
+ },
+ {
+ "epoch": 1.2053390015941985,
+ "grad_norm": 0.726294994354248,
+ "learning_rate": 0.0001302829477511773,
+ "loss": 1.4557,
+ "step": 1205
+ },
+ {
+ "epoch": 1.2063392829233222,
+ "grad_norm": 0.5984981060028076,
+ "learning_rate": 0.0001301830286157157,
+ "loss": 1.3455,
+ "step": 1206
+ },
+ {
+ "epoch": 1.207339564252446,
+ "grad_norm": 0.6096123456954956,
+ "learning_rate": 0.0001300830763145642,
+ "loss": 1.3562,
+ "step": 1207
+ },
+ {
+ "epoch": 1.2083398455815697,
+ "grad_norm": 0.815022349357605,
+ "learning_rate": 0.00012998309095755235,
+ "loss": 1.5287,
+ "step": 1208
+ },
+ {
+ "epoch": 1.2093401269106936,
+ "grad_norm": 0.990715742111206,
+ "learning_rate": 0.00012988307265454597,
+ "loss": 1.4186,
+ "step": 1209
+ },
+ {
+ "epoch": 1.2103404082398175,
+ "grad_norm": 0.8175992369651794,
+ "learning_rate": 0.0001297830215154471,
+ "loss": 1.3145,
+ "step": 1210
+ },
+ {
+ "epoch": 1.2113406895689414,
+ "grad_norm": 0.734703779220581,
+ "learning_rate": 0.00012968293765019384,
+ "loss": 1.359,
+ "step": 1211
+ },
+ {
+ "epoch": 1.212340970898065,
+ "grad_norm": 0.7687499523162842,
+ "learning_rate": 0.00012958282116876026,
+ "loss": 1.517,
+ "step": 1212
+ },
+ {
+ "epoch": 1.213341252227189,
+ "grad_norm": 0.5819374918937683,
+ "learning_rate": 0.00012948267218115624,
+ "loss": 1.353,
+ "step": 1213
+ },
+ {
+ "epoch": 1.2143415335563128,
+ "grad_norm": 0.6273573040962219,
+ "learning_rate": 0.00012938249079742743,
+ "loss": 1.3114,
+ "step": 1214
+ },
+ {
+ "epoch": 1.2153418148854365,
+ "grad_norm": 0.8144434094429016,
+ "learning_rate": 0.00012928227712765504,
+ "loss": 1.4763,
+ "step": 1215
+ },
+ {
+ "epoch": 1.2163420962145604,
+ "grad_norm": 0.6941531300544739,
+ "learning_rate": 0.0001291820312819558,
+ "loss": 1.3386,
+ "step": 1216
+ },
+ {
+ "epoch": 1.2173423775436842,
+ "grad_norm": 0.6548559069633484,
+ "learning_rate": 0.00012908175337048174,
+ "loss": 1.4763,
+ "step": 1217
+ },
+ {
+ "epoch": 1.218342658872808,
+ "grad_norm": 0.6587492227554321,
+ "learning_rate": 0.00012898144350342015,
+ "loss": 1.5342,
+ "step": 1218
+ },
+ {
+ "epoch": 1.2193429402019318,
+ "grad_norm": 0.6187465190887451,
+ "learning_rate": 0.0001288811017909934,
+ "loss": 1.5109,
+ "step": 1219
+ },
+ {
+ "epoch": 1.2203432215310557,
+ "grad_norm": 0.6152673959732056,
+ "learning_rate": 0.00012878072834345895,
+ "loss": 1.2812,
+ "step": 1220
+ },
+ {
+ "epoch": 1.2213435028601793,
+ "grad_norm": 0.6489769816398621,
+ "learning_rate": 0.00012868032327110904,
+ "loss": 1.4179,
+ "step": 1221
+ },
+ {
+ "epoch": 1.2223437841893032,
+ "grad_norm": 0.6900584697723389,
+ "learning_rate": 0.00012857988668427066,
+ "loss": 1.5862,
+ "step": 1222
+ },
+ {
+ "epoch": 1.2233440655184271,
+ "grad_norm": 0.7858991026878357,
+ "learning_rate": 0.0001284794186933055,
+ "loss": 1.5595,
+ "step": 1223
+ },
+ {
+ "epoch": 1.2243443468475508,
+ "grad_norm": 0.6736376285552979,
+ "learning_rate": 0.00012837891940860972,
+ "loss": 1.6053,
+ "step": 1224
+ },
+ {
+ "epoch": 1.2253446281766747,
+ "grad_norm": 0.6328126192092896,
+ "learning_rate": 0.00012827838894061377,
+ "loss": 1.5157,
+ "step": 1225
+ },
+ {
+ "epoch": 1.2263449095057986,
+ "grad_norm": 0.6681983470916748,
+ "learning_rate": 0.00012817782739978255,
+ "loss": 1.3832,
+ "step": 1226
+ },
+ {
+ "epoch": 1.2273451908349222,
+ "grad_norm": 0.6474846601486206,
+ "learning_rate": 0.00012807723489661495,
+ "loss": 1.4772,
+ "step": 1227
+ },
+ {
+ "epoch": 1.228345472164046,
+ "grad_norm": 0.7181409597396851,
+ "learning_rate": 0.00012797661154164395,
+ "loss": 1.4942,
+ "step": 1228
+ },
+ {
+ "epoch": 1.22934575349317,
+ "grad_norm": 0.8670255541801453,
+ "learning_rate": 0.00012787595744543647,
+ "loss": 1.3818,
+ "step": 1229
+ },
+ {
+ "epoch": 1.2303460348222939,
+ "grad_norm": 0.7007678747177124,
+ "learning_rate": 0.00012777527271859307,
+ "loss": 1.4428,
+ "step": 1230
+ },
+ {
+ "epoch": 1.2313463161514175,
+ "grad_norm": 0.5932227373123169,
+ "learning_rate": 0.0001276745574717481,
+ "loss": 1.1946,
+ "step": 1231
+ },
+ {
+ "epoch": 1.2323465974805414,
+ "grad_norm": 0.6067792177200317,
+ "learning_rate": 0.00012757381181556943,
+ "loss": 1.1963,
+ "step": 1232
+ },
+ {
+ "epoch": 1.2333468788096653,
+ "grad_norm": 0.6702238917350769,
+ "learning_rate": 0.0001274730358607583,
+ "loss": 1.4489,
+ "step": 1233
+ },
+ {
+ "epoch": 1.234347160138789,
+ "grad_norm": 0.6660708785057068,
+ "learning_rate": 0.00012737222971804924,
+ "loss": 1.4307,
+ "step": 1234
+ },
+ {
+ "epoch": 1.2353474414679129,
+ "grad_norm": 0.6711790561676025,
+ "learning_rate": 0.00012727139349821,
+ "loss": 1.4167,
+ "step": 1235
+ },
+ {
+ "epoch": 1.2363477227970368,
+ "grad_norm": 0.7145324349403381,
+ "learning_rate": 0.0001271705273120413,
+ "loss": 1.4125,
+ "step": 1236
+ },
+ {
+ "epoch": 1.2373480041261604,
+ "grad_norm": 0.6572199463844299,
+ "learning_rate": 0.00012706963127037685,
+ "loss": 1.2403,
+ "step": 1237
+ },
+ {
+ "epoch": 1.2383482854552843,
+ "grad_norm": 0.634173572063446,
+ "learning_rate": 0.00012696870548408316,
+ "loss": 1.2861,
+ "step": 1238
+ },
+ {
+ "epoch": 1.2393485667844082,
+ "grad_norm": 0.6740716695785522,
+ "learning_rate": 0.00012686775006405946,
+ "loss": 1.6377,
+ "step": 1239
+ },
+ {
+ "epoch": 1.2403488481135319,
+ "grad_norm": 0.7334823608398438,
+ "learning_rate": 0.00012676676512123747,
+ "loss": 1.4186,
+ "step": 1240
+ },
+ {
+ "epoch": 1.2413491294426557,
+ "grad_norm": 0.9025078415870667,
+ "learning_rate": 0.00012666575076658134,
+ "loss": 1.4633,
+ "step": 1241
+ },
+ {
+ "epoch": 1.2423494107717796,
+ "grad_norm": 0.7449138760566711,
+ "learning_rate": 0.00012656470711108764,
+ "loss": 1.5294,
+ "step": 1242
+ },
+ {
+ "epoch": 1.2433496921009033,
+ "grad_norm": 0.6459099054336548,
+ "learning_rate": 0.00012646363426578505,
+ "loss": 1.4404,
+ "step": 1243
+ },
+ {
+ "epoch": 1.2443499734300272,
+ "grad_norm": 0.762955904006958,
+ "learning_rate": 0.0001263625323417343,
+ "loss": 1.6486,
+ "step": 1244
+ },
+ {
+ "epoch": 1.245350254759151,
+ "grad_norm": 0.6792619228363037,
+ "learning_rate": 0.0001262614014500282,
+ "loss": 1.3946,
+ "step": 1245
+ },
+ {
+ "epoch": 1.2463505360882747,
+ "grad_norm": 0.7938026189804077,
+ "learning_rate": 0.00012616024170179126,
+ "loss": 1.5163,
+ "step": 1246
+ },
+ {
+ "epoch": 1.2473508174173986,
+ "grad_norm": 0.6673023104667664,
+ "learning_rate": 0.00012605905320817976,
+ "loss": 1.5599,
+ "step": 1247
+ },
+ {
+ "epoch": 1.2483510987465225,
+ "grad_norm": 0.6543686389923096,
+ "learning_rate": 0.00012595783608038155,
+ "loss": 1.3298,
+ "step": 1248
+ },
+ {
+ "epoch": 1.2493513800756464,
+ "grad_norm": 0.6570146083831787,
+ "learning_rate": 0.00012585659042961596,
+ "loss": 1.2528,
+ "step": 1249
+ },
+ {
+ "epoch": 1.25035166140477,
+ "grad_norm": 0.6886934638023376,
+ "learning_rate": 0.00012575531636713368,
+ "loss": 1.4567,
+ "step": 1250
+ },
+ {
+ "epoch": 1.251351942733894,
+ "grad_norm": 0.660229504108429,
+ "learning_rate": 0.00012565401400421651,
+ "loss": 1.3333,
+ "step": 1251
+ },
+ {
+ "epoch": 1.2523522240630176,
+ "grad_norm": 0.7553595900535583,
+ "learning_rate": 0.0001255526834521775,
+ "loss": 1.4152,
+ "step": 1252
+ },
+ {
+ "epoch": 1.2533525053921415,
+ "grad_norm": 0.6283079385757446,
+ "learning_rate": 0.00012545132482236055,
+ "loss": 1.3201,
+ "step": 1253
+ },
+ {
+ "epoch": 1.2543527867212654,
+ "grad_norm": 0.6704882383346558,
+ "learning_rate": 0.0001253499382261405,
+ "loss": 1.3711,
+ "step": 1254
+ },
+ {
+ "epoch": 1.2553530680503893,
+ "grad_norm": 0.7282963991165161,
+ "learning_rate": 0.00012524852377492285,
+ "loss": 1.5492,
+ "step": 1255
+ },
+ {
+ "epoch": 1.256353349379513,
+ "grad_norm": 0.7712034583091736,
+ "learning_rate": 0.00012514708158014378,
+ "loss": 1.4299,
+ "step": 1256
+ },
+ {
+ "epoch": 1.2573536307086368,
+ "grad_norm": 0.6231324076652527,
+ "learning_rate": 0.00012504561175326985,
+ "loss": 1.3492,
+ "step": 1257
+ },
+ {
+ "epoch": 1.2583539120377607,
+ "grad_norm": 0.7122861742973328,
+ "learning_rate": 0.00012494411440579814,
+ "loss": 1.4583,
+ "step": 1258
+ },
+ {
+ "epoch": 1.2593541933668844,
+ "grad_norm": 0.7797596454620361,
+ "learning_rate": 0.0001248425896492558,
+ "loss": 1.4185,
+ "step": 1259
+ },
+ {
+ "epoch": 1.2603544746960083,
+ "grad_norm": 0.7058592438697815,
+ "learning_rate": 0.00012474103759520027,
+ "loss": 1.7918,
+ "step": 1260
+ },
+ {
+ "epoch": 1.2613547560251321,
+ "grad_norm": 0.6629828214645386,
+ "learning_rate": 0.00012463945835521878,
+ "loss": 1.2547,
+ "step": 1261
+ },
+ {
+ "epoch": 1.2623550373542558,
+ "grad_norm": 0.6975031495094299,
+ "learning_rate": 0.0001245378520409286,
+ "loss": 1.6256,
+ "step": 1262
+ },
+ {
+ "epoch": 1.2633553186833797,
+ "grad_norm": 0.7939541935920715,
+ "learning_rate": 0.0001244362187639767,
+ "loss": 1.2817,
+ "step": 1263
+ },
+ {
+ "epoch": 1.2643556000125036,
+ "grad_norm": 0.7042918801307678,
+ "learning_rate": 0.00012433455863603967,
+ "loss": 1.4207,
+ "step": 1264
+ },
+ {
+ "epoch": 1.2653558813416272,
+ "grad_norm": 0.6327396631240845,
+ "learning_rate": 0.00012423287176882358,
+ "loss": 1.3927,
+ "step": 1265
+ },
+ {
+ "epoch": 1.2663561626707511,
+ "grad_norm": 0.5797626376152039,
+ "learning_rate": 0.00012413115827406392,
+ "loss": 1.2808,
+ "step": 1266
+ },
+ {
+ "epoch": 1.267356443999875,
+ "grad_norm": 0.6891800165176392,
+ "learning_rate": 0.00012402941826352546,
+ "loss": 1.2385,
+ "step": 1267
+ },
+ {
+ "epoch": 1.268356725328999,
+ "grad_norm": 0.6648104190826416,
+ "learning_rate": 0.00012392765184900202,
+ "loss": 1.5402,
+ "step": 1268
+ },
+ {
+ "epoch": 1.2693570066581226,
+ "grad_norm": 0.7340229749679565,
+ "learning_rate": 0.0001238258591423165,
+ "loss": 1.2926,
+ "step": 1269
+ },
+ {
+ "epoch": 1.2703572879872465,
+ "grad_norm": 0.670605480670929,
+ "learning_rate": 0.00012372404025532072,
+ "loss": 1.3863,
+ "step": 1270
+ },
+ {
+ "epoch": 1.2713575693163701,
+ "grad_norm": 0.7016957402229309,
+ "learning_rate": 0.00012362219529989514,
+ "loss": 1.7362,
+ "step": 1271
+ },
+ {
+ "epoch": 1.272357850645494,
+ "grad_norm": 0.6318536400794983,
+ "learning_rate": 0.00012352032438794902,
+ "loss": 1.4278,
+ "step": 1272
+ },
+ {
+ "epoch": 1.273358131974618,
+ "grad_norm": 0.5807138681411743,
+ "learning_rate": 0.00012341842763142005,
+ "loss": 1.4762,
+ "step": 1273
+ },
+ {
+ "epoch": 1.2743584133037418,
+ "grad_norm": 0.6634588241577148,
+ "learning_rate": 0.00012331650514227425,
+ "loss": 1.6209,
+ "step": 1274
+ },
+ {
+ "epoch": 1.2753586946328654,
+ "grad_norm": 0.709530770778656,
+ "learning_rate": 0.00012321455703250616,
+ "loss": 1.5209,
+ "step": 1275
+ },
+ {
+ "epoch": 1.2763589759619893,
+ "grad_norm": 0.678584098815918,
+ "learning_rate": 0.00012311258341413822,
+ "loss": 1.4247,
+ "step": 1276
+ },
+ {
+ "epoch": 1.277359257291113,
+ "grad_norm": 0.6134077310562134,
+ "learning_rate": 0.00012301058439922102,
+ "loss": 1.3792,
+ "step": 1277
+ },
+ {
+ "epoch": 1.2783595386202369,
+ "grad_norm": 0.694976806640625,
+ "learning_rate": 0.000122908560099833,
+ "loss": 1.5957,
+ "step": 1278
+ },
+ {
+ "epoch": 1.2793598199493608,
+ "grad_norm": 0.835444986820221,
+ "learning_rate": 0.00012280651062808047,
+ "loss": 1.4917,
+ "step": 1279
+ },
+ {
+ "epoch": 1.2803601012784847,
+ "grad_norm": 0.6491605043411255,
+ "learning_rate": 0.00012270443609609729,
+ "loss": 1.4417,
+ "step": 1280
+ },
+ {
+ "epoch": 1.2813603826076083,
+ "grad_norm": 0.6651148200035095,
+ "learning_rate": 0.0001226023366160449,
+ "loss": 1.2857,
+ "step": 1281
+ },
+ {
+ "epoch": 1.2823606639367322,
+ "grad_norm": 0.6736195683479309,
+ "learning_rate": 0.00012250021230011225,
+ "loss": 1.3431,
+ "step": 1282
+ },
+ {
+ "epoch": 1.283360945265856,
+ "grad_norm": 0.7162345051765442,
+ "learning_rate": 0.00012239806326051539,
+ "loss": 1.4009,
+ "step": 1283
+ },
+ {
+ "epoch": 1.2843612265949798,
+ "grad_norm": 0.8337399363517761,
+ "learning_rate": 0.00012229588960949771,
+ "loss": 1.5303,
+ "step": 1284
+ },
+ {
+ "epoch": 1.2853615079241036,
+ "grad_norm": 0.6648454666137695,
+ "learning_rate": 0.00012219369145932959,
+ "loss": 1.7088,
+ "step": 1285
+ },
+ {
+ "epoch": 1.2863617892532275,
+ "grad_norm": 0.6891435384750366,
+ "learning_rate": 0.00012209146892230822,
+ "loss": 1.4053,
+ "step": 1286
+ },
+ {
+ "epoch": 1.2873620705823514,
+ "grad_norm": 0.6659008860588074,
+ "learning_rate": 0.00012198922211075778,
+ "loss": 1.3959,
+ "step": 1287
+ },
+ {
+ "epoch": 1.288362351911475,
+ "grad_norm": 0.6926385164260864,
+ "learning_rate": 0.00012188695113702896,
+ "loss": 1.536,
+ "step": 1288
+ },
+ {
+ "epoch": 1.289362633240599,
+ "grad_norm": 0.6584843397140503,
+ "learning_rate": 0.00012178465611349911,
+ "loss": 1.5099,
+ "step": 1289
+ },
+ {
+ "epoch": 1.2903629145697226,
+ "grad_norm": 0.7430850267410278,
+ "learning_rate": 0.00012168233715257194,
+ "loss": 1.3367,
+ "step": 1290
+ },
+ {
+ "epoch": 1.2913631958988465,
+ "grad_norm": 0.8379004597663879,
+ "learning_rate": 0.00012157999436667747,
+ "loss": 1.3542,
+ "step": 1291
+ },
+ {
+ "epoch": 1.2923634772279704,
+ "grad_norm": 0.7217230796813965,
+ "learning_rate": 0.00012147762786827193,
+ "loss": 1.4465,
+ "step": 1292
+ },
+ {
+ "epoch": 1.2933637585570943,
+ "grad_norm": 0.7268504500389099,
+ "learning_rate": 0.00012137523776983757,
+ "loss": 1.2616,
+ "step": 1293
+ },
+ {
+ "epoch": 1.294364039886218,
+ "grad_norm": 0.7402834296226501,
+ "learning_rate": 0.00012127282418388264,
+ "loss": 1.271,
+ "step": 1294
+ },
+ {
+ "epoch": 1.2953643212153418,
+ "grad_norm": 0.6314610242843628,
+ "learning_rate": 0.0001211703872229411,
+ "loss": 1.3718,
+ "step": 1295
+ },
+ {
+ "epoch": 1.2963646025444655,
+ "grad_norm": 0.6106632947921753,
+ "learning_rate": 0.00012106792699957263,
+ "loss": 1.6404,
+ "step": 1296
+ },
+ {
+ "epoch": 1.2973648838735894,
+ "grad_norm": 0.6568691730499268,
+ "learning_rate": 0.00012096544362636255,
+ "loss": 1.3559,
+ "step": 1297
+ },
+ {
+ "epoch": 1.2983651652027133,
+ "grad_norm": 0.700645387172699,
+ "learning_rate": 0.00012086293721592152,
+ "loss": 1.5258,
+ "step": 1298
+ },
+ {
+ "epoch": 1.2993654465318372,
+ "grad_norm": 1.0722559690475464,
+ "learning_rate": 0.00012076040788088554,
+ "loss": 1.4921,
+ "step": 1299
+ },
+ {
+ "epoch": 1.3003657278609608,
+ "grad_norm": 0.6164102554321289,
+ "learning_rate": 0.00012065785573391581,
+ "loss": 1.3766,
+ "step": 1300
+ },
+ {
+ "epoch": 1.3013660091900847,
+ "grad_norm": 0.7066829204559326,
+ "learning_rate": 0.00012055528088769861,
+ "loss": 1.4322,
+ "step": 1301
+ },
+ {
+ "epoch": 1.3023662905192086,
+ "grad_norm": 0.6311153769493103,
+ "learning_rate": 0.00012045268345494511,
+ "loss": 1.2958,
+ "step": 1302
+ },
+ {
+ "epoch": 1.3033665718483323,
+ "grad_norm": 0.6254247426986694,
+ "learning_rate": 0.00012035006354839133,
+ "loss": 1.3125,
+ "step": 1303
+ },
+ {
+ "epoch": 1.3043668531774562,
+ "grad_norm": 0.7812719941139221,
+ "learning_rate": 0.00012024742128079805,
+ "loss": 1.5446,
+ "step": 1304
+ },
+ {
+ "epoch": 1.30536713450658,
+ "grad_norm": 0.6067742109298706,
+ "learning_rate": 0.00012014475676495052,
+ "loss": 1.2821,
+ "step": 1305
+ },
+ {
+ "epoch": 1.306367415835704,
+ "grad_norm": 0.6812780499458313,
+ "learning_rate": 0.00012004207011365849,
+ "loss": 1.2988,
+ "step": 1306
+ },
+ {
+ "epoch": 1.3073676971648276,
+ "grad_norm": 0.6978224515914917,
+ "learning_rate": 0.00011993936143975599,
+ "loss": 1.3677,
+ "step": 1307
+ },
+ {
+ "epoch": 1.3083679784939515,
+ "grad_norm": 0.660102903842926,
+ "learning_rate": 0.00011983663085610131,
+ "loss": 1.3688,
+ "step": 1308
+ },
+ {
+ "epoch": 1.3093682598230751,
+ "grad_norm": 0.706007182598114,
+ "learning_rate": 0.00011973387847557676,
+ "loss": 1.2638,
+ "step": 1309
+ },
+ {
+ "epoch": 1.310368541152199,
+ "grad_norm": 0.6711030602455139,
+ "learning_rate": 0.00011963110441108863,
+ "loss": 1.2569,
+ "step": 1310
+ },
+ {
+ "epoch": 1.311368822481323,
+ "grad_norm": 0.6825215816497803,
+ "learning_rate": 0.000119528308775567,
+ "loss": 1.3912,
+ "step": 1311
+ },
+ {
+ "epoch": 1.3123691038104468,
+ "grad_norm": 0.725979745388031,
+ "learning_rate": 0.00011942549168196575,
+ "loss": 1.331,
+ "step": 1312
+ },
+ {
+ "epoch": 1.3133693851395705,
+ "grad_norm": 0.6699597835540771,
+ "learning_rate": 0.00011932265324326221,
+ "loss": 1.4171,
+ "step": 1313
+ },
+ {
+ "epoch": 1.3143696664686944,
+ "grad_norm": 0.6116276383399963,
+ "learning_rate": 0.0001192197935724573,
+ "loss": 1.3333,
+ "step": 1314
+ },
+ {
+ "epoch": 1.315369947797818,
+ "grad_norm": 0.6644623875617981,
+ "learning_rate": 0.00011911691278257511,
+ "loss": 1.5515,
+ "step": 1315
+ },
+ {
+ "epoch": 1.316370229126942,
+ "grad_norm": 0.6456226110458374,
+ "learning_rate": 0.0001190140109866631,
+ "loss": 1.2691,
+ "step": 1316
+ },
+ {
+ "epoch": 1.3173705104560658,
+ "grad_norm": 0.6665071249008179,
+ "learning_rate": 0.00011891108829779165,
+ "loss": 1.3782,
+ "step": 1317
+ },
+ {
+ "epoch": 1.3183707917851897,
+ "grad_norm": 0.7108166813850403,
+ "learning_rate": 0.00011880814482905422,
+ "loss": 1.3122,
+ "step": 1318
+ },
+ {
+ "epoch": 1.3193710731143133,
+ "grad_norm": 0.7184962630271912,
+ "learning_rate": 0.00011870518069356709,
+ "loss": 1.3624,
+ "step": 1319
+ },
+ {
+ "epoch": 1.3203713544434372,
+ "grad_norm": 0.6516618132591248,
+ "learning_rate": 0.0001186021960044692,
+ "loss": 1.5654,
+ "step": 1320
+ },
+ {
+ "epoch": 1.3213716357725611,
+ "grad_norm": 0.6597375869750977,
+ "learning_rate": 0.00011849919087492211,
+ "loss": 1.4765,
+ "step": 1321
+ },
+ {
+ "epoch": 1.3223719171016848,
+ "grad_norm": 0.8731528520584106,
+ "learning_rate": 0.00011839616541810983,
+ "loss": 1.3876,
+ "step": 1322
+ },
+ {
+ "epoch": 1.3233721984308087,
+ "grad_norm": 0.6694337725639343,
+ "learning_rate": 0.00011829311974723867,
+ "loss": 1.327,
+ "step": 1323
+ },
+ {
+ "epoch": 1.3243724797599326,
+ "grad_norm": 0.7454182505607605,
+ "learning_rate": 0.00011819005397553723,
+ "loss": 1.4202,
+ "step": 1324
+ },
+ {
+ "epoch": 1.3253727610890562,
+ "grad_norm": 0.6783546209335327,
+ "learning_rate": 0.00011808696821625613,
+ "loss": 1.3966,
+ "step": 1325
+ },
+ {
+ "epoch": 1.32637304241818,
+ "grad_norm": 0.7126333713531494,
+ "learning_rate": 0.000117983862582668,
+ "loss": 1.3896,
+ "step": 1326
+ },
+ {
+ "epoch": 1.327373323747304,
+ "grad_norm": 0.6765009164810181,
+ "learning_rate": 0.00011788073718806725,
+ "loss": 1.3678,
+ "step": 1327
+ },
+ {
+ "epoch": 1.3283736050764277,
+ "grad_norm": 0.6767436861991882,
+ "learning_rate": 0.00011777759214577006,
+ "loss": 1.497,
+ "step": 1328
+ },
+ {
+ "epoch": 1.3293738864055515,
+ "grad_norm": 0.7058733105659485,
+ "learning_rate": 0.00011767442756911417,
+ "loss": 1.4992,
+ "step": 1329
+ },
+ {
+ "epoch": 1.3303741677346754,
+ "grad_norm": 0.6815193295478821,
+ "learning_rate": 0.00011757124357145881,
+ "loss": 1.4952,
+ "step": 1330
+ },
+ {
+ "epoch": 1.3313744490637993,
+ "grad_norm": 0.6513908505439758,
+ "learning_rate": 0.00011746804026618452,
+ "loss": 1.5202,
+ "step": 1331
+ },
+ {
+ "epoch": 1.332374730392923,
+ "grad_norm": 0.6763479113578796,
+ "learning_rate": 0.00011736481776669306,
+ "loss": 1.4547,
+ "step": 1332
+ },
+ {
+ "epoch": 1.3333750117220469,
+ "grad_norm": 0.6361576914787292,
+ "learning_rate": 0.00011726157618640728,
+ "loss": 1.3231,
+ "step": 1333
+ },
+ {
+ "epoch": 1.3343752930511705,
+ "grad_norm": 0.7247117161750793,
+ "learning_rate": 0.00011715831563877104,
+ "loss": 1.5126,
+ "step": 1334
+ },
+ {
+ "epoch": 1.3353755743802944,
+ "grad_norm": 0.8347336649894714,
+ "learning_rate": 0.00011705503623724898,
+ "loss": 1.5669,
+ "step": 1335
+ },
+ {
+ "epoch": 1.3363758557094183,
+ "grad_norm": 0.6969489455223083,
+ "learning_rate": 0.00011695173809532652,
+ "loss": 1.3646,
+ "step": 1336
+ },
+ {
+ "epoch": 1.3373761370385422,
+ "grad_norm": 0.6771052479743958,
+ "learning_rate": 0.00011684842132650957,
+ "loss": 1.6715,
+ "step": 1337
+ },
+ {
+ "epoch": 1.3383764183676659,
+ "grad_norm": 0.6224768161773682,
+ "learning_rate": 0.00011674508604432464,
+ "loss": 1.5712,
+ "step": 1338
+ },
+ {
+ "epoch": 1.3393766996967897,
+ "grad_norm": 0.6684542298316956,
+ "learning_rate": 0.00011664173236231848,
+ "loss": 1.5669,
+ "step": 1339
+ },
+ {
+ "epoch": 1.3403769810259134,
+ "grad_norm": 0.6805415153503418,
+ "learning_rate": 0.0001165383603940581,
+ "loss": 1.3648,
+ "step": 1340
+ },
+ {
+ "epoch": 1.3413772623550373,
+ "grad_norm": 0.7991671562194824,
+ "learning_rate": 0.00011643497025313061,
+ "loss": 1.3703,
+ "step": 1341
+ },
+ {
+ "epoch": 1.3423775436841612,
+ "grad_norm": 0.7023015022277832,
+ "learning_rate": 0.00011633156205314309,
+ "loss": 1.366,
+ "step": 1342
+ },
+ {
+ "epoch": 1.343377825013285,
+ "grad_norm": 0.7017894387245178,
+ "learning_rate": 0.00011622813590772244,
+ "loss": 1.4816,
+ "step": 1343
+ },
+ {
+ "epoch": 1.3443781063424087,
+ "grad_norm": 0.704626739025116,
+ "learning_rate": 0.00011612469193051525,
+ "loss": 1.2841,
+ "step": 1344
+ },
+ {
+ "epoch": 1.3453783876715326,
+ "grad_norm": 0.707372784614563,
+ "learning_rate": 0.00011602123023518779,
+ "loss": 1.3226,
+ "step": 1345
+ },
+ {
+ "epoch": 1.3463786690006565,
+ "grad_norm": 0.6367921233177185,
+ "learning_rate": 0.00011591775093542572,
+ "loss": 1.3292,
+ "step": 1346
+ },
+ {
+ "epoch": 1.3473789503297802,
+ "grad_norm": 0.7131246328353882,
+ "learning_rate": 0.0001158142541449341,
+ "loss": 1.3537,
+ "step": 1347
+ },
+ {
+ "epoch": 1.348379231658904,
+ "grad_norm": 0.6140089631080627,
+ "learning_rate": 0.00011571073997743716,
+ "loss": 1.4316,
+ "step": 1348
+ },
+ {
+ "epoch": 1.349379512988028,
+ "grad_norm": 0.6347403526306152,
+ "learning_rate": 0.0001156072085466783,
+ "loss": 1.4214,
+ "step": 1349
+ },
+ {
+ "epoch": 1.3503797943171518,
+ "grad_norm": 0.5540759563446045,
+ "learning_rate": 0.00011550365996641979,
+ "loss": 1.25,
+ "step": 1350
+ },
+ {
+ "epoch": 1.3513800756462755,
+ "grad_norm": 0.6721670031547546,
+ "learning_rate": 0.00011540009435044281,
+ "loss": 1.381,
+ "step": 1351
+ },
+ {
+ "epoch": 1.3523803569753994,
+ "grad_norm": 0.5901767015457153,
+ "learning_rate": 0.00011529651181254723,
+ "loss": 1.5127,
+ "step": 1352
+ },
+ {
+ "epoch": 1.353380638304523,
+ "grad_norm": 0.6438884139060974,
+ "learning_rate": 0.0001151929124665516,
+ "loss": 1.3348,
+ "step": 1353
+ },
+ {
+ "epoch": 1.354380919633647,
+ "grad_norm": 0.9631819725036621,
+ "learning_rate": 0.00011508929642629274,
+ "loss": 1.5822,
+ "step": 1354
+ },
+ {
+ "epoch": 1.3553812009627708,
+ "grad_norm": 0.6426034569740295,
+ "learning_rate": 0.00011498566380562601,
+ "loss": 1.3487,
+ "step": 1355
+ },
+ {
+ "epoch": 1.3563814822918947,
+ "grad_norm": 0.682264506816864,
+ "learning_rate": 0.0001148820147184249,
+ "loss": 1.3331,
+ "step": 1356
+ },
+ {
+ "epoch": 1.3573817636210184,
+ "grad_norm": 0.746907114982605,
+ "learning_rate": 0.00011477834927858104,
+ "loss": 1.38,
+ "step": 1357
+ },
+ {
+ "epoch": 1.3583820449501423,
+ "grad_norm": 0.7521925568580627,
+ "learning_rate": 0.00011467466760000399,
+ "loss": 1.3798,
+ "step": 1358
+ },
+ {
+ "epoch": 1.359382326279266,
+ "grad_norm": 0.7887171506881714,
+ "learning_rate": 0.00011457096979662114,
+ "loss": 1.3306,
+ "step": 1359
+ },
+ {
+ "epoch": 1.3603826076083898,
+ "grad_norm": 0.7623118162155151,
+ "learning_rate": 0.00011446725598237767,
+ "loss": 1.4307,
+ "step": 1360
+ },
+ {
+ "epoch": 1.3613828889375137,
+ "grad_norm": 0.6182076930999756,
+ "learning_rate": 0.00011436352627123623,
+ "loss": 1.3776,
+ "step": 1361
+ },
+ {
+ "epoch": 1.3623831702666376,
+ "grad_norm": 0.6136983633041382,
+ "learning_rate": 0.00011425978077717709,
+ "loss": 1.4815,
+ "step": 1362
+ },
+ {
+ "epoch": 1.3633834515957612,
+ "grad_norm": 0.6165998578071594,
+ "learning_rate": 0.00011415601961419775,
+ "loss": 1.4252,
+ "step": 1363
+ },
+ {
+ "epoch": 1.3643837329248851,
+ "grad_norm": 0.7338213324546814,
+ "learning_rate": 0.00011405224289631295,
+ "loss": 1.4426,
+ "step": 1364
+ },
+ {
+ "epoch": 1.365384014254009,
+ "grad_norm": 0.8061873316764832,
+ "learning_rate": 0.00011394845073755455,
+ "loss": 1.5366,
+ "step": 1365
+ },
+ {
+ "epoch": 1.3663842955831327,
+ "grad_norm": 0.8496206402778625,
+ "learning_rate": 0.0001138446432519714,
+ "loss": 1.3305,
+ "step": 1366
+ },
+ {
+ "epoch": 1.3673845769122566,
+ "grad_norm": 0.7370564341545105,
+ "learning_rate": 0.00011374082055362909,
+ "loss": 1.5069,
+ "step": 1367
+ },
+ {
+ "epoch": 1.3683848582413805,
+ "grad_norm": 0.630095362663269,
+ "learning_rate": 0.00011363698275661001,
+ "loss": 1.2846,
+ "step": 1368
+ },
+ {
+ "epoch": 1.3693851395705043,
+ "grad_norm": 0.7039877772331238,
+ "learning_rate": 0.00011353312997501313,
+ "loss": 1.4091,
+ "step": 1369
+ },
+ {
+ "epoch": 1.370385420899628,
+ "grad_norm": 0.7010135650634766,
+ "learning_rate": 0.00011342926232295386,
+ "loss": 1.4068,
+ "step": 1370
+ },
+ {
+ "epoch": 1.371385702228752,
+ "grad_norm": 0.6542472243309021,
+ "learning_rate": 0.00011332537991456398,
+ "loss": 1.3422,
+ "step": 1371
+ },
+ {
+ "epoch": 1.3723859835578756,
+ "grad_norm": 0.7767142057418823,
+ "learning_rate": 0.00011322148286399147,
+ "loss": 1.3156,
+ "step": 1372
+ },
+ {
+ "epoch": 1.3733862648869994,
+ "grad_norm": 0.7862635254859924,
+ "learning_rate": 0.0001131175712854004,
+ "loss": 1.3952,
+ "step": 1373
+ },
+ {
+ "epoch": 1.3743865462161233,
+ "grad_norm": 0.7444994449615479,
+ "learning_rate": 0.00011301364529297079,
+ "loss": 1.2304,
+ "step": 1374
+ },
+ {
+ "epoch": 1.3753868275452472,
+ "grad_norm": 0.6078478693962097,
+ "learning_rate": 0.0001129097050008985,
+ "loss": 1.4364,
+ "step": 1375
+ },
+ {
+ "epoch": 1.3763871088743709,
+ "grad_norm": 0.7011739015579224,
+ "learning_rate": 0.00011280575052339514,
+ "loss": 1.4455,
+ "step": 1376
+ },
+ {
+ "epoch": 1.3773873902034948,
+ "grad_norm": 0.7185930013656616,
+ "learning_rate": 0.00011270178197468789,
+ "loss": 1.349,
+ "step": 1377
+ },
+ {
+ "epoch": 1.3783876715326184,
+ "grad_norm": 0.6734675168991089,
+ "learning_rate": 0.00011259779946901934,
+ "loss": 1.4803,
+ "step": 1378
+ },
+ {
+ "epoch": 1.3793879528617423,
+ "grad_norm": 0.674439013004303,
+ "learning_rate": 0.0001124938031206475,
+ "loss": 1.5707,
+ "step": 1379
+ },
+ {
+ "epoch": 1.3803882341908662,
+ "grad_norm": 0.6843717694282532,
+ "learning_rate": 0.00011238979304384554,
+ "loss": 1.5508,
+ "step": 1380
+ },
+ {
+ "epoch": 1.38138851551999,
+ "grad_norm": 0.6178708672523499,
+ "learning_rate": 0.0001122857693529017,
+ "loss": 1.2827,
+ "step": 1381
+ },
+ {
+ "epoch": 1.3823887968491138,
+ "grad_norm": 0.721108078956604,
+ "learning_rate": 0.0001121817321621192,
+ "loss": 1.3768,
+ "step": 1382
+ },
+ {
+ "epoch": 1.3833890781782376,
+ "grad_norm": 0.6790196299552917,
+ "learning_rate": 0.00011207768158581613,
+ "loss": 1.508,
+ "step": 1383
+ },
+ {
+ "epoch": 1.3843893595073615,
+ "grad_norm": 0.6942607760429382,
+ "learning_rate": 0.00011197361773832525,
+ "loss": 1.2476,
+ "step": 1384
+ },
+ {
+ "epoch": 1.3853896408364852,
+ "grad_norm": 0.6791447997093201,
+ "learning_rate": 0.00011186954073399387,
+ "loss": 1.5537,
+ "step": 1385
+ },
+ {
+ "epoch": 1.386389922165609,
+ "grad_norm": 0.6842163801193237,
+ "learning_rate": 0.00011176545068718385,
+ "loss": 1.4336,
+ "step": 1386
+ },
+ {
+ "epoch": 1.387390203494733,
+ "grad_norm": 0.6922981142997742,
+ "learning_rate": 0.0001116613477122713,
+ "loss": 1.2496,
+ "step": 1387
+ },
+ {
+ "epoch": 1.3883904848238566,
+ "grad_norm": 0.7480785250663757,
+ "learning_rate": 0.00011155723192364658,
+ "loss": 1.5798,
+ "step": 1388
+ },
+ {
+ "epoch": 1.3893907661529805,
+ "grad_norm": 0.6371482610702515,
+ "learning_rate": 0.00011145310343571411,
+ "loss": 1.326,
+ "step": 1389
+ },
+ {
+ "epoch": 1.3903910474821044,
+ "grad_norm": 0.663810670375824,
+ "learning_rate": 0.00011134896236289224,
+ "loss": 1.3021,
+ "step": 1390
+ },
+ {
+ "epoch": 1.391391328811228,
+ "grad_norm": 0.6067004799842834,
+ "learning_rate": 0.0001112448088196132,
+ "loss": 1.5062,
+ "step": 1391
+ },
+ {
+ "epoch": 1.392391610140352,
+ "grad_norm": 0.6500088572502136,
+ "learning_rate": 0.00011114064292032282,
+ "loss": 1.3196,
+ "step": 1392
+ },
+ {
+ "epoch": 1.3933918914694758,
+ "grad_norm": 0.7117498517036438,
+ "learning_rate": 0.0001110364647794807,
+ "loss": 1.354,
+ "step": 1393
+ },
+ {
+ "epoch": 1.3943921727985997,
+ "grad_norm": 0.5792518854141235,
+ "learning_rate": 0.00011093227451155974,
+ "loss": 1.1684,
+ "step": 1394
+ },
+ {
+ "epoch": 1.3953924541277234,
+ "grad_norm": 0.6920313835144043,
+ "learning_rate": 0.0001108280722310462,
+ "loss": 1.5114,
+ "step": 1395
+ },
+ {
+ "epoch": 1.3963927354568473,
+ "grad_norm": 0.5932325720787048,
+ "learning_rate": 0.0001107238580524395,
+ "loss": 1.4519,
+ "step": 1396
+ },
+ {
+ "epoch": 1.397393016785971,
+ "grad_norm": 0.7241511940956116,
+ "learning_rate": 0.00011061963209025223,
+ "loss": 1.4131,
+ "step": 1397
+ },
+ {
+ "epoch": 1.3983932981150948,
+ "grad_norm": 0.6144927740097046,
+ "learning_rate": 0.00011051539445900983,
+ "loss": 1.4436,
+ "step": 1398
+ },
+ {
+ "epoch": 1.3993935794442187,
+ "grad_norm": 0.589124321937561,
+ "learning_rate": 0.00011041114527325065,
+ "loss": 1.4069,
+ "step": 1399
+ },
+ {
+ "epoch": 1.4003938607733426,
+ "grad_norm": 0.6695122122764587,
+ "learning_rate": 0.00011030688464752566,
+ "loss": 1.6472,
+ "step": 1400
+ },
+ {
+ "epoch": 1.4013941421024663,
+ "grad_norm": 0.6082026362419128,
+ "learning_rate": 0.00011020261269639842,
+ "loss": 1.3188,
+ "step": 1401
+ },
+ {
+ "epoch": 1.4023944234315902,
+ "grad_norm": 0.8926504254341125,
+ "learning_rate": 0.000110098329534445,
+ "loss": 1.4195,
+ "step": 1402
+ },
+ {
+ "epoch": 1.4033947047607138,
+ "grad_norm": 0.6825796365737915,
+ "learning_rate": 0.00010999403527625367,
+ "loss": 1.5214,
+ "step": 1403
+ },
+ {
+ "epoch": 1.4043949860898377,
+ "grad_norm": 0.6856653690338135,
+ "learning_rate": 0.00010988973003642499,
+ "loss": 1.4579,
+ "step": 1404
+ },
+ {
+ "epoch": 1.4053952674189616,
+ "grad_norm": 0.6756052374839783,
+ "learning_rate": 0.00010978541392957156,
+ "loss": 1.331,
+ "step": 1405
+ },
+ {
+ "epoch": 1.4063955487480855,
+ "grad_norm": 0.6562577486038208,
+ "learning_rate": 0.00010968108707031792,
+ "loss": 1.2906,
+ "step": 1406
+ },
+ {
+ "epoch": 1.4073958300772091,
+ "grad_norm": 0.7208355069160461,
+ "learning_rate": 0.00010957674957330042,
+ "loss": 1.5163,
+ "step": 1407
+ },
+ {
+ "epoch": 1.408396111406333,
+ "grad_norm": 0.6576356291770935,
+ "learning_rate": 0.00010947240155316707,
+ "loss": 1.3934,
+ "step": 1408
+ },
+ {
+ "epoch": 1.409396392735457,
+ "grad_norm": 0.6244364976882935,
+ "learning_rate": 0.00010936804312457749,
+ "loss": 1.3903,
+ "step": 1409
+ },
+ {
+ "epoch": 1.4103966740645806,
+ "grad_norm": 0.5791237354278564,
+ "learning_rate": 0.00010926367440220276,
+ "loss": 1.2708,
+ "step": 1410
+ },
+ {
+ "epoch": 1.4113969553937045,
+ "grad_norm": 0.7043458819389343,
+ "learning_rate": 0.00010915929550072517,
+ "loss": 1.2446,
+ "step": 1411
+ },
+ {
+ "epoch": 1.4123972367228284,
+ "grad_norm": 0.5865835547447205,
+ "learning_rate": 0.00010905490653483827,
+ "loss": 1.657,
+ "step": 1412
+ },
+ {
+ "epoch": 1.4133975180519522,
+ "grad_norm": 0.6033587455749512,
+ "learning_rate": 0.00010895050761924668,
+ "loss": 1.4481,
+ "step": 1413
+ },
+ {
+ "epoch": 1.414397799381076,
+ "grad_norm": 0.7100054025650024,
+ "learning_rate": 0.00010884609886866588,
+ "loss": 1.5413,
+ "step": 1414
+ },
+ {
+ "epoch": 1.4153980807101998,
+ "grad_norm": 0.8067667484283447,
+ "learning_rate": 0.00010874168039782227,
+ "loss": 1.4327,
+ "step": 1415
+ },
+ {
+ "epoch": 1.4163983620393235,
+ "grad_norm": 0.7058808207511902,
+ "learning_rate": 0.00010863725232145286,
+ "loss": 1.534,
+ "step": 1416
+ },
+ {
+ "epoch": 1.4173986433684473,
+ "grad_norm": 0.5253664255142212,
+ "learning_rate": 0.00010853281475430517,
+ "loss": 1.0816,
+ "step": 1417
+ },
+ {
+ "epoch": 1.4183989246975712,
+ "grad_norm": 0.6874144673347473,
+ "learning_rate": 0.0001084283678111372,
+ "loss": 1.4386,
+ "step": 1418
+ },
+ {
+ "epoch": 1.4193992060266951,
+ "grad_norm": 0.74046790599823,
+ "learning_rate": 0.00010832391160671729,
+ "loss": 1.3393,
+ "step": 1419
+ },
+ {
+ "epoch": 1.4203994873558188,
+ "grad_norm": 0.6461816430091858,
+ "learning_rate": 0.00010821944625582392,
+ "loss": 1.5251,
+ "step": 1420
+ },
+ {
+ "epoch": 1.4213997686849427,
+ "grad_norm": 0.6058275699615479,
+ "learning_rate": 0.00010811497187324555,
+ "loss": 1.1555,
+ "step": 1421
+ },
+ {
+ "epoch": 1.4224000500140663,
+ "grad_norm": 0.6121847033500671,
+ "learning_rate": 0.00010801048857378071,
+ "loss": 1.248,
+ "step": 1422
+ },
+ {
+ "epoch": 1.4234003313431902,
+ "grad_norm": 0.5949802398681641,
+ "learning_rate": 0.00010790599647223763,
+ "loss": 1.5056,
+ "step": 1423
+ },
+ {
+ "epoch": 1.424400612672314,
+ "grad_norm": 0.6441097259521484,
+ "learning_rate": 0.0001078014956834342,
+ "loss": 1.5484,
+ "step": 1424
+ },
+ {
+ "epoch": 1.425400894001438,
+ "grad_norm": 0.686570942401886,
+ "learning_rate": 0.00010769698632219794,
+ "loss": 1.631,
+ "step": 1425
+ },
+ {
+ "epoch": 1.4264011753305617,
+ "grad_norm": 0.675699770450592,
+ "learning_rate": 0.00010759246850336572,
+ "loss": 1.4044,
+ "step": 1426
+ },
+ {
+ "epoch": 1.4274014566596855,
+ "grad_norm": 0.7777390480041504,
+ "learning_rate": 0.0001074879423417837,
+ "loss": 1.6326,
+ "step": 1427
+ },
+ {
+ "epoch": 1.4284017379888094,
+ "grad_norm": 0.6218550205230713,
+ "learning_rate": 0.00010738340795230721,
+ "loss": 1.5341,
+ "step": 1428
+ },
+ {
+ "epoch": 1.429402019317933,
+ "grad_norm": 0.7164304256439209,
+ "learning_rate": 0.00010727886544980068,
+ "loss": 1.5888,
+ "step": 1429
+ },
+ {
+ "epoch": 1.430402300647057,
+ "grad_norm": 0.6924182772636414,
+ "learning_rate": 0.00010717431494913741,
+ "loss": 1.2542,
+ "step": 1430
+ },
+ {
+ "epoch": 1.4314025819761809,
+ "grad_norm": 0.6145774126052856,
+ "learning_rate": 0.00010706975656519946,
+ "loss": 1.4038,
+ "step": 1431
+ },
+ {
+ "epoch": 1.4324028633053048,
+ "grad_norm": 0.5560014843940735,
+ "learning_rate": 0.00010696519041287765,
+ "loss": 1.2659,
+ "step": 1432
+ },
+ {
+ "epoch": 1.4334031446344284,
+ "grad_norm": 0.6854751110076904,
+ "learning_rate": 0.0001068606166070712,
+ "loss": 1.4407,
+ "step": 1433
+ },
+ {
+ "epoch": 1.4344034259635523,
+ "grad_norm": 0.6936755180358887,
+ "learning_rate": 0.00010675603526268785,
+ "loss": 1.4219,
+ "step": 1434
+ },
+ {
+ "epoch": 1.435403707292676,
+ "grad_norm": 0.8174700736999512,
+ "learning_rate": 0.00010665144649464356,
+ "loss": 1.397,
+ "step": 1435
+ },
+ {
+ "epoch": 1.4364039886217999,
+ "grad_norm": 0.7231045365333557,
+ "learning_rate": 0.00010654685041786249,
+ "loss": 1.4558,
+ "step": 1436
+ },
+ {
+ "epoch": 1.4374042699509237,
+ "grad_norm": 0.6431431174278259,
+ "learning_rate": 0.00010644224714727681,
+ "loss": 1.3522,
+ "step": 1437
+ },
+ {
+ "epoch": 1.4384045512800476,
+ "grad_norm": 0.7025414109230042,
+ "learning_rate": 0.0001063376367978266,
+ "loss": 1.2723,
+ "step": 1438
+ },
+ {
+ "epoch": 1.4394048326091713,
+ "grad_norm": 0.6382789611816406,
+ "learning_rate": 0.00010623301948445971,
+ "loss": 1.4065,
+ "step": 1439
+ },
+ {
+ "epoch": 1.4404051139382952,
+ "grad_norm": 0.7055328488349915,
+ "learning_rate": 0.00010612839532213164,
+ "loss": 1.6186,
+ "step": 1440
+ },
+ {
+ "epoch": 1.4414053952674188,
+ "grad_norm": 0.6769623160362244,
+ "learning_rate": 0.00010602376442580544,
+ "loss": 1.49,
+ "step": 1441
+ },
+ {
+ "epoch": 1.4424056765965427,
+ "grad_norm": 0.6875097751617432,
+ "learning_rate": 0.00010591912691045152,
+ "loss": 1.3063,
+ "step": 1442
+ },
+ {
+ "epoch": 1.4434059579256666,
+ "grad_norm": 0.7747283577919006,
+ "learning_rate": 0.00010581448289104758,
+ "loss": 1.67,
+ "step": 1443
+ },
+ {
+ "epoch": 1.4444062392547905,
+ "grad_norm": 0.7236614227294922,
+ "learning_rate": 0.00010570983248257853,
+ "loss": 1.4703,
+ "step": 1444
+ },
+ {
+ "epoch": 1.4454065205839142,
+ "grad_norm": 0.7141956686973572,
+ "learning_rate": 0.00010560517580003617,
+ "loss": 1.5828,
+ "step": 1445
+ },
+ {
+ "epoch": 1.446406801913038,
+ "grad_norm": 0.679790198802948,
+ "learning_rate": 0.00010550051295841931,
+ "loss": 1.4706,
+ "step": 1446
+ },
+ {
+ "epoch": 1.447407083242162,
+ "grad_norm": 0.7930448651313782,
+ "learning_rate": 0.00010539584407273349,
+ "loss": 1.5388,
+ "step": 1447
+ },
+ {
+ "epoch": 1.4484073645712856,
+ "grad_norm": 0.7099994421005249,
+ "learning_rate": 0.00010529116925799085,
+ "loss": 1.408,
+ "step": 1448
+ },
+ {
+ "epoch": 1.4494076459004095,
+ "grad_norm": 0.6459046602249146,
+ "learning_rate": 0.00010518648862921012,
+ "loss": 1.3263,
+ "step": 1449
+ },
+ {
+ "epoch": 1.4504079272295334,
+ "grad_norm": 0.761446475982666,
+ "learning_rate": 0.00010508180230141635,
+ "loss": 1.3166,
+ "step": 1450
+ },
+ {
+ "epoch": 1.451408208558657,
+ "grad_norm": 0.6198295950889587,
+ "learning_rate": 0.00010497711038964086,
+ "loss": 1.3191,
+ "step": 1451
+ },
+ {
+ "epoch": 1.452408489887781,
+ "grad_norm": 0.6751184463500977,
+ "learning_rate": 0.0001048724130089212,
+ "loss": 1.1869,
+ "step": 1452
+ },
+ {
+ "epoch": 1.4534087712169048,
+ "grad_norm": 0.6853645443916321,
+ "learning_rate": 0.00010476771027430086,
+ "loss": 1.2579,
+ "step": 1453
+ },
+ {
+ "epoch": 1.4544090525460285,
+ "grad_norm": 0.6402629017829895,
+ "learning_rate": 0.00010466300230082911,
+ "loss": 1.3192,
+ "step": 1454
+ },
+ {
+ "epoch": 1.4554093338751524,
+ "grad_norm": 0.7862108945846558,
+ "learning_rate": 0.00010455828920356115,
+ "loss": 1.3619,
+ "step": 1455
+ },
+ {
+ "epoch": 1.4564096152042763,
+ "grad_norm": 0.7008057832717896,
+ "learning_rate": 0.00010445357109755771,
+ "loss": 1.3524,
+ "step": 1456
+ },
+ {
+ "epoch": 1.4574098965334001,
+ "grad_norm": 0.6477895379066467,
+ "learning_rate": 0.00010434884809788508,
+ "loss": 1.6172,
+ "step": 1457
+ },
+ {
+ "epoch": 1.4584101778625238,
+ "grad_norm": 0.6312345862388611,
+ "learning_rate": 0.00010424412031961484,
+ "loss": 1.2121,
+ "step": 1458
+ },
+ {
+ "epoch": 1.4594104591916477,
+ "grad_norm": 0.6922104358673096,
+ "learning_rate": 0.00010413938787782394,
+ "loss": 1.3243,
+ "step": 1459
+ },
+ {
+ "epoch": 1.4604107405207714,
+ "grad_norm": 0.670599102973938,
+ "learning_rate": 0.00010403465088759437,
+ "loss": 1.346,
+ "step": 1460
+ },
+ {
+ "epoch": 1.4614110218498952,
+ "grad_norm": 0.7351789474487305,
+ "learning_rate": 0.00010392990946401313,
+ "loss": 1.5343,
+ "step": 1461
+ },
+ {
+ "epoch": 1.4624113031790191,
+ "grad_norm": 0.6756190061569214,
+ "learning_rate": 0.00010382516372217215,
+ "loss": 1.4118,
+ "step": 1462
+ },
+ {
+ "epoch": 1.463411584508143,
+ "grad_norm": 0.6219121217727661,
+ "learning_rate": 0.000103720413777168,
+ "loss": 1.3602,
+ "step": 1463
+ },
+ {
+ "epoch": 1.4644118658372667,
+ "grad_norm": 0.6602663993835449,
+ "learning_rate": 0.00010361565974410192,
+ "loss": 1.4921,
+ "step": 1464
+ },
+ {
+ "epoch": 1.4654121471663906,
+ "grad_norm": 0.6103453636169434,
+ "learning_rate": 0.00010351090173807969,
+ "loss": 1.3259,
+ "step": 1465
+ },
+ {
+ "epoch": 1.4664124284955142,
+ "grad_norm": 0.745473325252533,
+ "learning_rate": 0.00010340613987421137,
+ "loss": 1.6036,
+ "step": 1466
+ },
+ {
+ "epoch": 1.4674127098246381,
+ "grad_norm": 0.6537976861000061,
+ "learning_rate": 0.00010330137426761135,
+ "loss": 1.3511,
+ "step": 1467
+ },
+ {
+ "epoch": 1.468412991153762,
+ "grad_norm": 0.7108463048934937,
+ "learning_rate": 0.00010319660503339808,
+ "loss": 1.4814,
+ "step": 1468
+ },
+ {
+ "epoch": 1.469413272482886,
+ "grad_norm": 0.6372820734977722,
+ "learning_rate": 0.00010309183228669397,
+ "loss": 1.468,
+ "step": 1469
+ },
+ {
+ "epoch": 1.4704135538120096,
+ "grad_norm": 0.6098326444625854,
+ "learning_rate": 0.00010298705614262532,
+ "loss": 1.6763,
+ "step": 1470
+ },
+ {
+ "epoch": 1.4714138351411334,
+ "grad_norm": 0.6385009288787842,
+ "learning_rate": 0.0001028822767163222,
+ "loss": 1.3058,
+ "step": 1471
+ },
+ {
+ "epoch": 1.4724141164702573,
+ "grad_norm": 0.6848032474517822,
+ "learning_rate": 0.00010277749412291824,
+ "loss": 1.4115,
+ "step": 1472
+ },
+ {
+ "epoch": 1.473414397799381,
+ "grad_norm": 0.7532572746276855,
+ "learning_rate": 0.00010267270847755048,
+ "loss": 1.4219,
+ "step": 1473
+ },
+ {
+ "epoch": 1.4744146791285049,
+ "grad_norm": 0.7336605787277222,
+ "learning_rate": 0.00010256791989535952,
+ "loss": 1.4092,
+ "step": 1474
+ },
+ {
+ "epoch": 1.4754149604576288,
+ "grad_norm": 0.6300507187843323,
+ "learning_rate": 0.00010246312849148899,
+ "loss": 1.2911,
+ "step": 1475
+ },
+ {
+ "epoch": 1.4764152417867527,
+ "grad_norm": 0.7114218473434448,
+ "learning_rate": 0.00010235833438108571,
+ "loss": 1.5038,
+ "step": 1476
+ },
+ {
+ "epoch": 1.4774155231158763,
+ "grad_norm": 0.7215398550033569,
+ "learning_rate": 0.00010225353767929944,
+ "loss": 1.4919,
+ "step": 1477
+ },
+ {
+ "epoch": 1.4784158044450002,
+ "grad_norm": 0.6189507246017456,
+ "learning_rate": 0.00010214873850128282,
+ "loss": 1.2092,
+ "step": 1478
+ },
+ {
+ "epoch": 1.4794160857741239,
+ "grad_norm": 0.5806283950805664,
+ "learning_rate": 0.00010204393696219117,
+ "loss": 1.2862,
+ "step": 1479
+ },
+ {
+ "epoch": 1.4804163671032478,
+ "grad_norm": 0.7068900465965271,
+ "learning_rate": 0.00010193913317718244,
+ "loss": 1.319,
+ "step": 1480
+ },
+ {
+ "epoch": 1.4814166484323716,
+ "grad_norm": 0.749792218208313,
+ "learning_rate": 0.00010183432726141706,
+ "loss": 1.3661,
+ "step": 1481
+ },
+ {
+ "epoch": 1.4824169297614955,
+ "grad_norm": 0.7314055562019348,
+ "learning_rate": 0.00010172951933005775,
+ "loss": 1.5695,
+ "step": 1482
+ },
+ {
+ "epoch": 1.4834172110906192,
+ "grad_norm": 0.6871920228004456,
+ "learning_rate": 0.00010162470949826948,
+ "loss": 1.3598,
+ "step": 1483
+ },
+ {
+ "epoch": 1.484417492419743,
+ "grad_norm": 0.7139384150505066,
+ "learning_rate": 0.0001015198978812193,
+ "loss": 1.4942,
+ "step": 1484
+ },
+ {
+ "epoch": 1.4854177737488667,
+ "grad_norm": 0.6459400653839111,
+ "learning_rate": 0.00010141508459407623,
+ "loss": 1.3971,
+ "step": 1485
+ },
+ {
+ "epoch": 1.4864180550779906,
+ "grad_norm": 0.8157202005386353,
+ "learning_rate": 0.0001013102697520111,
+ "loss": 1.4679,
+ "step": 1486
+ },
+ {
+ "epoch": 1.4874183364071145,
+ "grad_norm": 0.6978387832641602,
+ "learning_rate": 0.00010120545347019647,
+ "loss": 1.4547,
+ "step": 1487
+ },
+ {
+ "epoch": 1.4884186177362384,
+ "grad_norm": 0.641835629940033,
+ "learning_rate": 0.00010110063586380646,
+ "loss": 1.6611,
+ "step": 1488
+ },
+ {
+ "epoch": 1.489418899065362,
+ "grad_norm": 0.723709225654602,
+ "learning_rate": 0.00010099581704801673,
+ "loss": 1.3994,
+ "step": 1489
+ },
+ {
+ "epoch": 1.490419180394486,
+ "grad_norm": 0.6613619327545166,
+ "learning_rate": 0.00010089099713800414,
+ "loss": 1.5722,
+ "step": 1490
+ },
+ {
+ "epoch": 1.4914194617236098,
+ "grad_norm": 0.6406750082969666,
+ "learning_rate": 0.00010078617624894684,
+ "loss": 1.312,
+ "step": 1491
+ },
+ {
+ "epoch": 1.4924197430527335,
+ "grad_norm": 0.5216225385665894,
+ "learning_rate": 0.000100681354496024,
+ "loss": 1.3552,
+ "step": 1492
+ },
+ {
+ "epoch": 1.4934200243818574,
+ "grad_norm": 0.7549086809158325,
+ "learning_rate": 0.00010057653199441581,
+ "loss": 1.4344,
+ "step": 1493
+ },
+ {
+ "epoch": 1.4944203057109813,
+ "grad_norm": 0.6958007216453552,
+ "learning_rate": 0.00010047170885930324,
+ "loss": 1.254,
+ "step": 1494
+ },
+ {
+ "epoch": 1.4954205870401052,
+ "grad_norm": 0.706564724445343,
+ "learning_rate": 0.00010036688520586788,
+ "loss": 1.4854,
+ "step": 1495
+ },
+ {
+ "epoch": 1.4964208683692288,
+ "grad_norm": 0.6802704930305481,
+ "learning_rate": 0.00010026206114929209,
+ "loss": 1.4631,
+ "step": 1496
+ },
+ {
+ "epoch": 1.4974211496983527,
+ "grad_norm": 0.645449697971344,
+ "learning_rate": 0.00010015723680475846,
+ "loss": 1.5165,
+ "step": 1497
+ },
+ {
+ "epoch": 1.4984214310274764,
+ "grad_norm": 0.5729085206985474,
+ "learning_rate": 0.00010005241228745004,
+ "loss": 1.2683,
+ "step": 1498
+ },
+ {
+ "epoch": 1.4994217123566003,
+ "grad_norm": 0.6592169404029846,
+ "learning_rate": 9.994758771254997e-05,
+ "loss": 1.4722,
+ "step": 1499
+ },
+ {
+ "epoch": 1.5004219936857242,
+ "grad_norm": 0.6299737691879272,
+ "learning_rate": 9.984276319524154e-05,
+ "loss": 1.3664,
+ "step": 1500
+ },
+ {
+ "epoch": 1.501422275014848,
+ "grad_norm": 0.6960833072662354,
+ "learning_rate": 9.973793885070792e-05,
+ "loss": 1.3959,
+ "step": 1501
+ },
+ {
+ "epoch": 1.5024225563439717,
+ "grad_norm": 0.7174006700515747,
+ "learning_rate": 9.963311479413211e-05,
+ "loss": 1.5448,
+ "step": 1502
+ },
+ {
+ "epoch": 1.5034228376730956,
+ "grad_norm": 0.6396325826644897,
+ "learning_rate": 9.95282911406968e-05,
+ "loss": 1.5168,
+ "step": 1503
+ },
+ {
+ "epoch": 1.5044231190022193,
+ "grad_norm": 0.809868335723877,
+ "learning_rate": 9.942346800558421e-05,
+ "loss": 1.4467,
+ "step": 1504
+ },
+ {
+ "epoch": 1.5054234003313431,
+ "grad_norm": 0.6106623411178589,
+ "learning_rate": 9.931864550397601e-05,
+ "loss": 1.519,
+ "step": 1505
+ },
+ {
+ "epoch": 1.506423681660467,
+ "grad_norm": 0.7253887057304382,
+ "learning_rate": 9.921382375105318e-05,
+ "loss": 1.3442,
+ "step": 1506
+ },
+ {
+ "epoch": 1.507423962989591,
+ "grad_norm": 0.6945338249206543,
+ "learning_rate": 9.910900286199587e-05,
+ "loss": 1.5319,
+ "step": 1507
+ },
+ {
+ "epoch": 1.5084242443187148,
+ "grad_norm": 0.682486891746521,
+ "learning_rate": 9.900418295198328e-05,
+ "loss": 1.3558,
+ "step": 1508
+ },
+ {
+ "epoch": 1.5094245256478385,
+ "grad_norm": 0.6952700018882751,
+ "learning_rate": 9.889936413619356e-05,
+ "loss": 1.3194,
+ "step": 1509
+ },
+ {
+ "epoch": 1.5104248069769621,
+ "grad_norm": 0.6775678992271423,
+ "learning_rate": 9.879454652980358e-05,
+ "loss": 1.5936,
+ "step": 1510
+ },
+ {
+ "epoch": 1.511425088306086,
+ "grad_norm": 0.8723187446594238,
+ "learning_rate": 9.868973024798895e-05,
+ "loss": 1.4172,
+ "step": 1511
+ },
+ {
+ "epoch": 1.51242536963521,
+ "grad_norm": 0.8881109952926636,
+ "learning_rate": 9.858491540592382e-05,
+ "loss": 1.4405,
+ "step": 1512
+ },
+ {
+ "epoch": 1.5134256509643338,
+ "grad_norm": 0.6580207347869873,
+ "learning_rate": 9.848010211878074e-05,
+ "loss": 1.6241,
+ "step": 1513
+ },
+ {
+ "epoch": 1.5144259322934577,
+ "grad_norm": 0.6160255074501038,
+ "learning_rate": 9.837529050173052e-05,
+ "loss": 1.4222,
+ "step": 1514
+ },
+ {
+ "epoch": 1.5154262136225813,
+ "grad_norm": 0.7516399025917053,
+ "learning_rate": 9.827048066994225e-05,
+ "loss": 1.2495,
+ "step": 1515
+ },
+ {
+ "epoch": 1.516426494951705,
+ "grad_norm": 0.5907468795776367,
+ "learning_rate": 9.816567273858296e-05,
+ "loss": 1.1453,
+ "step": 1516
+ },
+ {
+ "epoch": 1.517426776280829,
+ "grad_norm": 0.7371746301651001,
+ "learning_rate": 9.806086682281758e-05,
+ "loss": 1.5006,
+ "step": 1517
+ },
+ {
+ "epoch": 1.5184270576099528,
+ "grad_norm": 0.6646453142166138,
+ "learning_rate": 9.795606303780885e-05,
+ "loss": 1.5237,
+ "step": 1518
+ },
+ {
+ "epoch": 1.5194273389390767,
+ "grad_norm": 0.7043606638908386,
+ "learning_rate": 9.785126149871722e-05,
+ "loss": 1.3339,
+ "step": 1519
+ },
+ {
+ "epoch": 1.5204276202682006,
+ "grad_norm": 0.8110997676849365,
+ "learning_rate": 9.77464623207006e-05,
+ "loss": 1.5193,
+ "step": 1520
+ },
+ {
+ "epoch": 1.5214279015973242,
+ "grad_norm": 0.6463339328765869,
+ "learning_rate": 9.764166561891432e-05,
+ "loss": 1.412,
+ "step": 1521
+ },
+ {
+ "epoch": 1.522428182926448,
+ "grad_norm": 0.7538262605667114,
+ "learning_rate": 9.753687150851102e-05,
+ "loss": 1.5326,
+ "step": 1522
+ },
+ {
+ "epoch": 1.5234284642555718,
+ "grad_norm": 0.7361929416656494,
+ "learning_rate": 9.74320801046405e-05,
+ "loss": 1.4104,
+ "step": 1523
+ },
+ {
+ "epoch": 1.5244287455846957,
+ "grad_norm": 0.7260544896125793,
+ "learning_rate": 9.732729152244953e-05,
+ "loss": 1.6037,
+ "step": 1524
+ },
+ {
+ "epoch": 1.5254290269138195,
+ "grad_norm": 0.6636849641799927,
+ "learning_rate": 9.722250587708181e-05,
+ "loss": 1.2908,
+ "step": 1525
+ },
+ {
+ "epoch": 1.5264293082429434,
+ "grad_norm": 0.7487931251525879,
+ "learning_rate": 9.711772328367784e-05,
+ "loss": 1.5143,
+ "step": 1526
+ },
+ {
+ "epoch": 1.527429589572067,
+ "grad_norm": 0.7224540710449219,
+ "learning_rate": 9.70129438573747e-05,
+ "loss": 1.4773,
+ "step": 1527
+ },
+ {
+ "epoch": 1.528429870901191,
+ "grad_norm": 0.6425575017929077,
+ "learning_rate": 9.690816771330608e-05,
+ "loss": 1.329,
+ "step": 1528
+ },
+ {
+ "epoch": 1.5294301522303146,
+ "grad_norm": 0.6902957558631897,
+ "learning_rate": 9.680339496660192e-05,
+ "loss": 1.3085,
+ "step": 1529
+ },
+ {
+ "epoch": 1.5304304335594385,
+ "grad_norm": 0.6722397804260254,
+ "learning_rate": 9.669862573238863e-05,
+ "loss": 1.5163,
+ "step": 1530
+ },
+ {
+ "epoch": 1.5314307148885624,
+ "grad_norm": 0.7586985230445862,
+ "learning_rate": 9.659386012578863e-05,
+ "loss": 1.3072,
+ "step": 1531
+ },
+ {
+ "epoch": 1.5324309962176863,
+ "grad_norm": 0.7313751578330994,
+ "learning_rate": 9.648909826192033e-05,
+ "loss": 1.4071,
+ "step": 1532
+ },
+ {
+ "epoch": 1.5334312775468102,
+ "grad_norm": 0.6215599775314331,
+ "learning_rate": 9.63843402558981e-05,
+ "loss": 1.4255,
+ "step": 1533
+ },
+ {
+ "epoch": 1.5344315588759339,
+ "grad_norm": 0.7188824415206909,
+ "learning_rate": 9.627958622283203e-05,
+ "loss": 1.4148,
+ "step": 1534
+ },
+ {
+ "epoch": 1.5354318402050575,
+ "grad_norm": 0.6444137692451477,
+ "learning_rate": 9.617483627782788e-05,
+ "loss": 1.6461,
+ "step": 1535
+ },
+ {
+ "epoch": 1.5364321215341814,
+ "grad_norm": 0.7150428295135498,
+ "learning_rate": 9.607009053598689e-05,
+ "loss": 1.5799,
+ "step": 1536
+ },
+ {
+ "epoch": 1.5374324028633053,
+ "grad_norm": 0.6654619574546814,
+ "learning_rate": 9.596534911240566e-05,
+ "loss": 1.3762,
+ "step": 1537
+ },
+ {
+ "epoch": 1.5384326841924292,
+ "grad_norm": 0.5966542363166809,
+ "learning_rate": 9.58606121221761e-05,
+ "loss": 1.4305,
+ "step": 1538
+ },
+ {
+ "epoch": 1.539432965521553,
+ "grad_norm": 0.6061896085739136,
+ "learning_rate": 9.57558796803852e-05,
+ "loss": 1.2492,
+ "step": 1539
+ },
+ {
+ "epoch": 1.5404332468506767,
+ "grad_norm": 0.7098972797393799,
+ "learning_rate": 9.565115190211497e-05,
+ "loss": 1.3917,
+ "step": 1540
+ },
+ {
+ "epoch": 1.5414335281798006,
+ "grad_norm": 0.7661631107330322,
+ "learning_rate": 9.554642890244233e-05,
+ "loss": 1.5048,
+ "step": 1541
+ },
+ {
+ "epoch": 1.5424338095089243,
+ "grad_norm": 2.214449405670166,
+ "learning_rate": 9.54417107964389e-05,
+ "loss": 1.3739,
+ "step": 1542
+ },
+ {
+ "epoch": 1.5434340908380482,
+ "grad_norm": 0.6061079502105713,
+ "learning_rate": 9.533699769917092e-05,
+ "loss": 1.4466,
+ "step": 1543
+ },
+ {
+ "epoch": 1.544434372167172,
+ "grad_norm": 0.76081383228302,
+ "learning_rate": 9.523228972569917e-05,
+ "loss": 1.2733,
+ "step": 1544
+ },
+ {
+ "epoch": 1.545434653496296,
+ "grad_norm": 0.6445167064666748,
+ "learning_rate": 9.512758699107879e-05,
+ "loss": 1.404,
+ "step": 1545
+ },
+ {
+ "epoch": 1.5464349348254196,
+ "grad_norm": 0.53884357213974,
+ "learning_rate": 9.502288961035912e-05,
+ "loss": 1.2348,
+ "step": 1546
+ },
+ {
+ "epoch": 1.5474352161545435,
+ "grad_norm": 0.589690625667572,
+ "learning_rate": 9.491819769858366e-05,
+ "loss": 1.2912,
+ "step": 1547
+ },
+ {
+ "epoch": 1.5484354974836672,
+ "grad_norm": 0.6259596347808838,
+ "learning_rate": 9.48135113707899e-05,
+ "loss": 1.4254,
+ "step": 1548
+ },
+ {
+ "epoch": 1.549435778812791,
+ "grad_norm": 0.5648382306098938,
+ "learning_rate": 9.470883074200916e-05,
+ "loss": 1.205,
+ "step": 1549
+ },
+ {
+ "epoch": 1.550436060141915,
+ "grad_norm": 0.6659985780715942,
+ "learning_rate": 9.460415592726653e-05,
+ "loss": 1.2726,
+ "step": 1550
+ },
+ {
+ "epoch": 1.5514363414710388,
+ "grad_norm": 0.6457205414772034,
+ "learning_rate": 9.449948704158071e-05,
+ "loss": 1.4254,
+ "step": 1551
+ },
+ {
+ "epoch": 1.5524366228001627,
+ "grad_norm": 0.753474235534668,
+ "learning_rate": 9.439482419996384e-05,
+ "loss": 1.3446,
+ "step": 1552
+ },
+ {
+ "epoch": 1.5534369041292864,
+ "grad_norm": 0.6353628039360046,
+ "learning_rate": 9.42901675174215e-05,
+ "loss": 1.2947,
+ "step": 1553
+ },
+ {
+ "epoch": 1.55443718545841,
+ "grad_norm": 0.5484879016876221,
+ "learning_rate": 9.418551710895243e-05,
+ "loss": 1.3678,
+ "step": 1554
+ },
+ {
+ "epoch": 1.555437466787534,
+ "grad_norm": 0.564643919467926,
+ "learning_rate": 9.408087308954853e-05,
+ "loss": 1.2557,
+ "step": 1555
+ },
+ {
+ "epoch": 1.5564377481166578,
+ "grad_norm": 0.6570972800254822,
+ "learning_rate": 9.397623557419461e-05,
+ "loss": 1.3769,
+ "step": 1556
+ },
+ {
+ "epoch": 1.5574380294457817,
+ "grad_norm": 0.5943097472190857,
+ "learning_rate": 9.38716046778684e-05,
+ "loss": 1.2613,
+ "step": 1557
+ },
+ {
+ "epoch": 1.5584383107749056,
+ "grad_norm": 0.6349796056747437,
+ "learning_rate": 9.37669805155403e-05,
+ "loss": 1.4075,
+ "step": 1558
+ },
+ {
+ "epoch": 1.5594385921040292,
+ "grad_norm": 0.6645040512084961,
+ "learning_rate": 9.366236320217339e-05,
+ "loss": 1.6224,
+ "step": 1559
+ },
+ {
+ "epoch": 1.5604388734331531,
+ "grad_norm": 0.7049742937088013,
+ "learning_rate": 9.355775285272318e-05,
+ "loss": 1.3948,
+ "step": 1560
+ },
+ {
+ "epoch": 1.5614391547622768,
+ "grad_norm": 0.7328057885169983,
+ "learning_rate": 9.34531495821375e-05,
+ "loss": 1.4479,
+ "step": 1561
+ },
+ {
+ "epoch": 1.5624394360914007,
+ "grad_norm": 0.6629959344863892,
+ "learning_rate": 9.334855350535645e-05,
+ "loss": 1.4399,
+ "step": 1562
+ },
+ {
+ "epoch": 1.5634397174205246,
+ "grad_norm": 0.5964148640632629,
+ "learning_rate": 9.324396473731217e-05,
+ "loss": 1.3064,
+ "step": 1563
+ },
+ {
+ "epoch": 1.5644399987496485,
+ "grad_norm": 0.6908231377601624,
+ "learning_rate": 9.313938339292883e-05,
+ "loss": 1.234,
+ "step": 1564
+ },
+ {
+ "epoch": 1.5654402800787721,
+ "grad_norm": 0.6026841402053833,
+ "learning_rate": 9.303480958712239e-05,
+ "loss": 1.2906,
+ "step": 1565
+ },
+ {
+ "epoch": 1.566440561407896,
+ "grad_norm": 0.5934796333312988,
+ "learning_rate": 9.293024343480055e-05,
+ "loss": 1.1448,
+ "step": 1566
+ },
+ {
+ "epoch": 1.5674408427370197,
+ "grad_norm": 0.6869467496871948,
+ "learning_rate": 9.282568505086261e-05,
+ "loss": 1.283,
+ "step": 1567
+ },
+ {
+ "epoch": 1.5684411240661436,
+ "grad_norm": 0.6558713912963867,
+ "learning_rate": 9.272113455019935e-05,
+ "loss": 1.502,
+ "step": 1568
+ },
+ {
+ "epoch": 1.5694414053952674,
+ "grad_norm": 0.6627963781356812,
+ "learning_rate": 9.261659204769284e-05,
+ "loss": 1.2985,
+ "step": 1569
+ },
+ {
+ "epoch": 1.5704416867243913,
+ "grad_norm": 0.7012712359428406,
+ "learning_rate": 9.251205765821636e-05,
+ "loss": 1.3899,
+ "step": 1570
+ },
+ {
+ "epoch": 1.5714419680535152,
+ "grad_norm": 0.6767538785934448,
+ "learning_rate": 9.240753149663433e-05,
+ "loss": 1.4869,
+ "step": 1571
+ },
+ {
+ "epoch": 1.5724422493826389,
+ "grad_norm": 0.6274527311325073,
+ "learning_rate": 9.230301367780208e-05,
+ "loss": 1.4491,
+ "step": 1572
+ },
+ {
+ "epoch": 1.5734425307117625,
+ "grad_norm": 0.6079627275466919,
+ "learning_rate": 9.219850431656579e-05,
+ "loss": 1.247,
+ "step": 1573
+ },
+ {
+ "epoch": 1.5744428120408864,
+ "grad_norm": 0.9947478175163269,
+ "learning_rate": 9.209400352776237e-05,
+ "loss": 1.3117,
+ "step": 1574
+ },
+ {
+ "epoch": 1.5754430933700103,
+ "grad_norm": 0.6441598534584045,
+ "learning_rate": 9.198951142621929e-05,
+ "loss": 1.2931,
+ "step": 1575
+ },
+ {
+ "epoch": 1.5764433746991342,
+ "grad_norm": 0.5995433926582336,
+ "learning_rate": 9.188502812675446e-05,
+ "loss": 1.3293,
+ "step": 1576
+ },
+ {
+ "epoch": 1.577443656028258,
+ "grad_norm": 0.5997470617294312,
+ "learning_rate": 9.178055374417612e-05,
+ "loss": 1.2542,
+ "step": 1577
+ },
+ {
+ "epoch": 1.5784439373573818,
+ "grad_norm": 0.7376891374588013,
+ "learning_rate": 9.167608839328272e-05,
+ "loss": 1.4369,
+ "step": 1578
+ },
+ {
+ "epoch": 1.5794442186865054,
+ "grad_norm": 0.7353281378746033,
+ "learning_rate": 9.15716321888628e-05,
+ "loss": 1.6255,
+ "step": 1579
+ },
+ {
+ "epoch": 1.5804445000156293,
+ "grad_norm": 0.6899515986442566,
+ "learning_rate": 9.146718524569487e-05,
+ "loss": 1.2246,
+ "step": 1580
+ },
+ {
+ "epoch": 1.5814447813447532,
+ "grad_norm": 0.6453947424888611,
+ "learning_rate": 9.136274767854716e-05,
+ "loss": 1.3967,
+ "step": 1581
+ },
+ {
+ "epoch": 1.582445062673877,
+ "grad_norm": 0.7168171405792236,
+ "learning_rate": 9.125831960217774e-05,
+ "loss": 1.3952,
+ "step": 1582
+ },
+ {
+ "epoch": 1.583445344003001,
+ "grad_norm": 0.6337130665779114,
+ "learning_rate": 9.115390113133414e-05,
+ "loss": 1.1972,
+ "step": 1583
+ },
+ {
+ "epoch": 1.5844456253321246,
+ "grad_norm": 0.7215299606323242,
+ "learning_rate": 9.104949238075336e-05,
+ "loss": 1.3696,
+ "step": 1584
+ },
+ {
+ "epoch": 1.5854459066612485,
+ "grad_norm": 0.7228485941886902,
+ "learning_rate": 9.094509346516178e-05,
+ "loss": 1.3858,
+ "step": 1585
+ },
+ {
+ "epoch": 1.5864461879903722,
+ "grad_norm": 0.6178514957427979,
+ "learning_rate": 9.084070449927488e-05,
+ "loss": 1.1581,
+ "step": 1586
+ },
+ {
+ "epoch": 1.587446469319496,
+ "grad_norm": 0.5726553201675415,
+ "learning_rate": 9.07363255977973e-05,
+ "loss": 1.265,
+ "step": 1587
+ },
+ {
+ "epoch": 1.58844675064862,
+ "grad_norm": 0.6116858124732971,
+ "learning_rate": 9.063195687542249e-05,
+ "loss": 1.3056,
+ "step": 1588
+ },
+ {
+ "epoch": 1.5894470319777438,
+ "grad_norm": 0.6042388677597046,
+ "learning_rate": 9.052759844683295e-05,
+ "loss": 1.3403,
+ "step": 1589
+ },
+ {
+ "epoch": 1.5904473133068675,
+ "grad_norm": 0.6811801791191101,
+ "learning_rate": 9.042325042669961e-05,
+ "loss": 1.4842,
+ "step": 1590
+ },
+ {
+ "epoch": 1.5914475946359914,
+ "grad_norm": 0.591401994228363,
+ "learning_rate": 9.03189129296821e-05,
+ "loss": 0.975,
+ "step": 1591
+ },
+ {
+ "epoch": 1.592447875965115,
+ "grad_norm": 0.5907956957817078,
+ "learning_rate": 9.021458607042845e-05,
+ "loss": 1.2758,
+ "step": 1592
+ },
+ {
+ "epoch": 1.593448157294239,
+ "grad_norm": 0.7272189259529114,
+ "learning_rate": 9.011026996357503e-05,
+ "loss": 1.3661,
+ "step": 1593
+ },
+ {
+ "epoch": 1.5944484386233628,
+ "grad_norm": 0.652340829372406,
+ "learning_rate": 9.000596472374637e-05,
+ "loss": 1.3547,
+ "step": 1594
+ },
+ {
+ "epoch": 1.5954487199524867,
+ "grad_norm": 0.8212108016014099,
+ "learning_rate": 8.990167046555504e-05,
+ "loss": 1.2207,
+ "step": 1595
+ },
+ {
+ "epoch": 1.5964490012816106,
+ "grad_norm": 0.6061079502105713,
+ "learning_rate": 8.97973873036016e-05,
+ "loss": 1.2555,
+ "step": 1596
+ },
+ {
+ "epoch": 1.5974492826107343,
+ "grad_norm": 0.5761566162109375,
+ "learning_rate": 8.969311535247438e-05,
+ "loss": 1.3051,
+ "step": 1597
+ },
+ {
+ "epoch": 1.598449563939858,
+ "grad_norm": 0.6714027523994446,
+ "learning_rate": 8.958885472674939e-05,
+ "loss": 1.4451,
+ "step": 1598
+ },
+ {
+ "epoch": 1.5994498452689818,
+ "grad_norm": 0.6701240539550781,
+ "learning_rate": 8.948460554099018e-05,
+ "loss": 1.4353,
+ "step": 1599
+ },
+ {
+ "epoch": 1.6004501265981057,
+ "grad_norm": 0.7223709225654602,
+ "learning_rate": 8.93803679097478e-05,
+ "loss": 1.4029,
+ "step": 1600
+ },
+ {
+ "epoch": 1.6014504079272296,
+ "grad_norm": 0.6414337158203125,
+ "learning_rate": 8.927614194756052e-05,
+ "loss": 1.304,
+ "step": 1601
+ },
+ {
+ "epoch": 1.6024506892563535,
+ "grad_norm": 0.6110413670539856,
+ "learning_rate": 8.917192776895382e-05,
+ "loss": 1.2504,
+ "step": 1602
+ },
+ {
+ "epoch": 1.6034509705854771,
+ "grad_norm": 0.6857700943946838,
+ "learning_rate": 8.906772548844026e-05,
+ "loss": 1.4735,
+ "step": 1603
+ },
+ {
+ "epoch": 1.604451251914601,
+ "grad_norm": 0.7300008535385132,
+ "learning_rate": 8.896353522051928e-05,
+ "loss": 1.753,
+ "step": 1604
+ },
+ {
+ "epoch": 1.6054515332437247,
+ "grad_norm": 0.6020368933677673,
+ "learning_rate": 8.885935707967716e-05,
+ "loss": 1.2869,
+ "step": 1605
+ },
+ {
+ "epoch": 1.6064518145728486,
+ "grad_norm": 0.5244629979133606,
+ "learning_rate": 8.875519118038684e-05,
+ "loss": 1.432,
+ "step": 1606
+ },
+ {
+ "epoch": 1.6074520959019725,
+ "grad_norm": 0.6224693059921265,
+ "learning_rate": 8.865103763710777e-05,
+ "loss": 1.6618,
+ "step": 1607
+ },
+ {
+ "epoch": 1.6084523772310964,
+ "grad_norm": 0.6111294627189636,
+ "learning_rate": 8.854689656428591e-05,
+ "loss": 1.4098,
+ "step": 1608
+ },
+ {
+ "epoch": 1.60945265856022,
+ "grad_norm": 0.6179168820381165,
+ "learning_rate": 8.844276807635343e-05,
+ "loss": 1.1749,
+ "step": 1609
+ },
+ {
+ "epoch": 1.610452939889344,
+ "grad_norm": 0.658416748046875,
+ "learning_rate": 8.833865228772871e-05,
+ "loss": 1.3686,
+ "step": 1610
+ },
+ {
+ "epoch": 1.6114532212184676,
+ "grad_norm": 0.5781399607658386,
+ "learning_rate": 8.823454931281616e-05,
+ "loss": 1.3618,
+ "step": 1611
+ },
+ {
+ "epoch": 1.6124535025475915,
+ "grad_norm": 0.6480880975723267,
+ "learning_rate": 8.813045926600615e-05,
+ "loss": 1.4162,
+ "step": 1612
+ },
+ {
+ "epoch": 1.6134537838767153,
+ "grad_norm": 0.5510106682777405,
+ "learning_rate": 8.802638226167479e-05,
+ "loss": 1.1625,
+ "step": 1613
+ },
+ {
+ "epoch": 1.6144540652058392,
+ "grad_norm": 0.6937603950500488,
+ "learning_rate": 8.792231841418391e-05,
+ "loss": 1.462,
+ "step": 1614
+ },
+ {
+ "epoch": 1.6154543465349631,
+ "grad_norm": 0.8652899265289307,
+ "learning_rate": 8.781826783788084e-05,
+ "loss": 1.389,
+ "step": 1615
+ },
+ {
+ "epoch": 1.6164546278640868,
+ "grad_norm": 0.6258351802825928,
+ "learning_rate": 8.771423064709837e-05,
+ "loss": 1.322,
+ "step": 1616
+ },
+ {
+ "epoch": 1.6174549091932104,
+ "grad_norm": 0.6774043440818787,
+ "learning_rate": 8.76102069561545e-05,
+ "loss": 1.3344,
+ "step": 1617
+ },
+ {
+ "epoch": 1.6184551905223343,
+ "grad_norm": 0.6406411528587341,
+ "learning_rate": 8.750619687935251e-05,
+ "loss": 1.3929,
+ "step": 1618
+ },
+ {
+ "epoch": 1.6194554718514582,
+ "grad_norm": 0.6380543112754822,
+ "learning_rate": 8.740220053098067e-05,
+ "loss": 1.413,
+ "step": 1619
+ },
+ {
+ "epoch": 1.620455753180582,
+ "grad_norm": 0.7143612504005432,
+ "learning_rate": 8.729821802531212e-05,
+ "loss": 1.541,
+ "step": 1620
+ },
+ {
+ "epoch": 1.621456034509706,
+ "grad_norm": 0.5897488594055176,
+ "learning_rate": 8.719424947660487e-05,
+ "loss": 1.2968,
+ "step": 1621
+ },
+ {
+ "epoch": 1.6224563158388297,
+ "grad_norm": 0.6275039911270142,
+ "learning_rate": 8.70902949991015e-05,
+ "loss": 1.4192,
+ "step": 1622
+ },
+ {
+ "epoch": 1.6234565971679535,
+ "grad_norm": 0.7218581438064575,
+ "learning_rate": 8.698635470702923e-05,
+ "loss": 1.41,
+ "step": 1623
+ },
+ {
+ "epoch": 1.6244568784970772,
+ "grad_norm": 0.7635208964347839,
+ "learning_rate": 8.688242871459963e-05,
+ "loss": 1.3006,
+ "step": 1624
+ },
+ {
+ "epoch": 1.625457159826201,
+ "grad_norm": 0.746904730796814,
+ "learning_rate": 8.677851713600855e-05,
+ "loss": 1.7362,
+ "step": 1625
+ },
+ {
+ "epoch": 1.626457441155325,
+ "grad_norm": 0.6612946391105652,
+ "learning_rate": 8.667462008543603e-05,
+ "loss": 1.6304,
+ "step": 1626
+ },
+ {
+ "epoch": 1.6274577224844489,
+ "grad_norm": 0.6576234698295593,
+ "learning_rate": 8.657073767704615e-05,
+ "loss": 1.4814,
+ "step": 1627
+ },
+ {
+ "epoch": 1.6284580038135725,
+ "grad_norm": 0.6360548138618469,
+ "learning_rate": 8.646687002498692e-05,
+ "loss": 1.4134,
+ "step": 1628
+ },
+ {
+ "epoch": 1.6294582851426964,
+ "grad_norm": 0.6985300183296204,
+ "learning_rate": 8.636301724339004e-05,
+ "loss": 1.4916,
+ "step": 1629
+ },
+ {
+ "epoch": 1.63045856647182,
+ "grad_norm": 0.5798565149307251,
+ "learning_rate": 8.625917944637096e-05,
+ "loss": 1.4304,
+ "step": 1630
+ },
+ {
+ "epoch": 1.631458847800944,
+ "grad_norm": 0.6414662599563599,
+ "learning_rate": 8.615535674802865e-05,
+ "loss": 1.4268,
+ "step": 1631
+ },
+ {
+ "epoch": 1.6324591291300679,
+ "grad_norm": 0.6145825386047363,
+ "learning_rate": 8.605154926244543e-05,
+ "loss": 1.1465,
+ "step": 1632
+ },
+ {
+ "epoch": 1.6334594104591917,
+ "grad_norm": 0.6959055662155151,
+ "learning_rate": 8.594775710368704e-05,
+ "loss": 1.4171,
+ "step": 1633
+ },
+ {
+ "epoch": 1.6344596917883156,
+ "grad_norm": 0.6742627024650574,
+ "learning_rate": 8.584398038580226e-05,
+ "loss": 1.4406,
+ "step": 1634
+ },
+ {
+ "epoch": 1.6354599731174393,
+ "grad_norm": 0.6703431606292725,
+ "learning_rate": 8.574021922282292e-05,
+ "loss": 1.423,
+ "step": 1635
+ },
+ {
+ "epoch": 1.636460254446563,
+ "grad_norm": 0.6874369382858276,
+ "learning_rate": 8.563647372876378e-05,
+ "loss": 1.3816,
+ "step": 1636
+ },
+ {
+ "epoch": 1.6374605357756868,
+ "grad_norm": 0.6975083351135254,
+ "learning_rate": 8.553274401762237e-05,
+ "loss": 1.3082,
+ "step": 1637
+ },
+ {
+ "epoch": 1.6384608171048107,
+ "grad_norm": 0.6560392379760742,
+ "learning_rate": 8.542903020337887e-05,
+ "loss": 1.4106,
+ "step": 1638
+ },
+ {
+ "epoch": 1.6394610984339346,
+ "grad_norm": 0.7531464099884033,
+ "learning_rate": 8.532533239999602e-05,
+ "loss": 1.1798,
+ "step": 1639
+ },
+ {
+ "epoch": 1.6404613797630585,
+ "grad_norm": 0.683595597743988,
+ "learning_rate": 8.522165072141897e-05,
+ "loss": 1.2502,
+ "step": 1640
+ },
+ {
+ "epoch": 1.6414616610921822,
+ "grad_norm": 0.6005716323852539,
+ "learning_rate": 8.511798528157512e-05,
+ "loss": 1.1569,
+ "step": 1641
+ },
+ {
+ "epoch": 1.6424619424213058,
+ "grad_norm": 0.5878857374191284,
+ "learning_rate": 8.501433619437403e-05,
+ "loss": 1.1993,
+ "step": 1642
+ },
+ {
+ "epoch": 1.6434622237504297,
+ "grad_norm": 0.6806159615516663,
+ "learning_rate": 8.49107035737073e-05,
+ "loss": 1.48,
+ "step": 1643
+ },
+ {
+ "epoch": 1.6444625050795536,
+ "grad_norm": 0.6241198182106018,
+ "learning_rate": 8.480708753344846e-05,
+ "loss": 1.5493,
+ "step": 1644
+ },
+ {
+ "epoch": 1.6454627864086775,
+ "grad_norm": 0.6669902205467224,
+ "learning_rate": 8.470348818745278e-05,
+ "loss": 1.3893,
+ "step": 1645
+ },
+ {
+ "epoch": 1.6464630677378014,
+ "grad_norm": 0.7545666694641113,
+ "learning_rate": 8.459990564955721e-05,
+ "loss": 1.2774,
+ "step": 1646
+ },
+ {
+ "epoch": 1.647463349066925,
+ "grad_norm": 0.7358554601669312,
+ "learning_rate": 8.449634003358022e-05,
+ "loss": 1.4932,
+ "step": 1647
+ },
+ {
+ "epoch": 1.648463630396049,
+ "grad_norm": 0.5833718180656433,
+ "learning_rate": 8.43927914533217e-05,
+ "loss": 1.447,
+ "step": 1648
+ },
+ {
+ "epoch": 1.6494639117251726,
+ "grad_norm": 0.7357178330421448,
+ "learning_rate": 8.428926002256283e-05,
+ "loss": 1.5369,
+ "step": 1649
+ },
+ {
+ "epoch": 1.6504641930542965,
+ "grad_norm": 0.6580341458320618,
+ "learning_rate": 8.418574585506591e-05,
+ "loss": 1.5551,
+ "step": 1650
+ },
+ {
+ "epoch": 1.6514644743834204,
+ "grad_norm": 0.637784481048584,
+ "learning_rate": 8.408224906457429e-05,
+ "loss": 1.3233,
+ "step": 1651
+ },
+ {
+ "epoch": 1.6524647557125443,
+ "grad_norm": 0.6412131190299988,
+ "learning_rate": 8.397876976481224e-05,
+ "loss": 1.15,
+ "step": 1652
+ },
+ {
+ "epoch": 1.653465037041668,
+ "grad_norm": 0.6500189900398254,
+ "learning_rate": 8.387530806948476e-05,
+ "loss": 1.3027,
+ "step": 1653
+ },
+ {
+ "epoch": 1.6544653183707918,
+ "grad_norm": 0.6529775857925415,
+ "learning_rate": 8.37718640922776e-05,
+ "loss": 1.4015,
+ "step": 1654
+ },
+ {
+ "epoch": 1.6554655996999155,
+ "grad_norm": 0.5686854720115662,
+ "learning_rate": 8.366843794685695e-05,
+ "loss": 1.3276,
+ "step": 1655
+ },
+ {
+ "epoch": 1.6564658810290394,
+ "grad_norm": 0.6397770643234253,
+ "learning_rate": 8.356502974686941e-05,
+ "loss": 1.2586,
+ "step": 1656
+ },
+ {
+ "epoch": 1.6574661623581632,
+ "grad_norm": 0.688079297542572,
+ "learning_rate": 8.346163960594193e-05,
+ "loss": 1.4179,
+ "step": 1657
+ },
+ {
+ "epoch": 1.6584664436872871,
+ "grad_norm": 0.8436989784240723,
+ "learning_rate": 8.335826763768156e-05,
+ "loss": 1.3527,
+ "step": 1658
+ },
+ {
+ "epoch": 1.659466725016411,
+ "grad_norm": 0.6351317763328552,
+ "learning_rate": 8.325491395567541e-05,
+ "loss": 1.2692,
+ "step": 1659
+ },
+ {
+ "epoch": 1.6604670063455347,
+ "grad_norm": 0.6486929059028625,
+ "learning_rate": 8.315157867349046e-05,
+ "loss": 1.3388,
+ "step": 1660
+ },
+ {
+ "epoch": 1.6614672876746583,
+ "grad_norm": 0.6507102847099304,
+ "learning_rate": 8.30482619046735e-05,
+ "loss": 1.3335,
+ "step": 1661
+ },
+ {
+ "epoch": 1.6624675690037822,
+ "grad_norm": 0.5884740948677063,
+ "learning_rate": 8.294496376275104e-05,
+ "loss": 1.3208,
+ "step": 1662
+ },
+ {
+ "epoch": 1.6634678503329061,
+ "grad_norm": 0.6564494371414185,
+ "learning_rate": 8.284168436122898e-05,
+ "loss": 1.164,
+ "step": 1663
+ },
+ {
+ "epoch": 1.66446813166203,
+ "grad_norm": 0.738129198551178,
+ "learning_rate": 8.273842381359273e-05,
+ "loss": 1.5263,
+ "step": 1664
+ },
+ {
+ "epoch": 1.665468412991154,
+ "grad_norm": 0.6807828545570374,
+ "learning_rate": 8.263518223330697e-05,
+ "loss": 1.3342,
+ "step": 1665
+ },
+ {
+ "epoch": 1.6664686943202776,
+ "grad_norm": 0.6691699624061584,
+ "learning_rate": 8.253195973381552e-05,
+ "loss": 1.3643,
+ "step": 1666
+ },
+ {
+ "epoch": 1.6674689756494014,
+ "grad_norm": 0.7294644117355347,
+ "learning_rate": 8.242875642854121e-05,
+ "loss": 1.3019,
+ "step": 1667
+ },
+ {
+ "epoch": 1.668469256978525,
+ "grad_norm": 0.9484224915504456,
+ "learning_rate": 8.232557243088585e-05,
+ "loss": 1.568,
+ "step": 1668
+ },
+ {
+ "epoch": 1.669469538307649,
+ "grad_norm": 0.6717697978019714,
+ "learning_rate": 8.222240785422996e-05,
+ "loss": 1.4427,
+ "step": 1669
+ },
+ {
+ "epoch": 1.6704698196367729,
+ "grad_norm": 0.6472289562225342,
+ "learning_rate": 8.211926281193277e-05,
+ "loss": 1.3165,
+ "step": 1670
+ },
+ {
+ "epoch": 1.6714701009658968,
+ "grad_norm": 0.6698246598243713,
+ "learning_rate": 8.201613741733203e-05,
+ "loss": 1.5418,
+ "step": 1671
+ },
+ {
+ "epoch": 1.6724703822950204,
+ "grad_norm": 0.6673927307128906,
+ "learning_rate": 8.191303178374389e-05,
+ "loss": 1.2539,
+ "step": 1672
+ },
+ {
+ "epoch": 1.6734706636241443,
+ "grad_norm": 0.659993588924408,
+ "learning_rate": 8.180994602446279e-05,
+ "loss": 1.4708,
+ "step": 1673
+ },
+ {
+ "epoch": 1.674470944953268,
+ "grad_norm": 0.6471976041793823,
+ "learning_rate": 8.170688025276134e-05,
+ "loss": 1.2014,
+ "step": 1674
+ },
+ {
+ "epoch": 1.6754712262823919,
+ "grad_norm": 0.6816028952598572,
+ "learning_rate": 8.160383458189022e-05,
+ "loss": 1.4187,
+ "step": 1675
+ },
+ {
+ "epoch": 1.6764715076115158,
+ "grad_norm": 0.747825562953949,
+ "learning_rate": 8.15008091250779e-05,
+ "loss": 1.4394,
+ "step": 1676
+ },
+ {
+ "epoch": 1.6774717889406396,
+ "grad_norm": 0.6439304947853088,
+ "learning_rate": 8.13978039955308e-05,
+ "loss": 1.3567,
+ "step": 1677
+ },
+ {
+ "epoch": 1.6784720702697635,
+ "grad_norm": 0.7007876634597778,
+ "learning_rate": 8.12948193064329e-05,
+ "loss": 1.3913,
+ "step": 1678
+ },
+ {
+ "epoch": 1.6794723515988872,
+ "grad_norm": 0.6919410228729248,
+ "learning_rate": 8.119185517094578e-05,
+ "loss": 1.217,
+ "step": 1679
+ },
+ {
+ "epoch": 1.6804726329280109,
+ "grad_norm": 0.59043288230896,
+ "learning_rate": 8.108891170220836e-05,
+ "loss": 1.2202,
+ "step": 1680
+ },
+ {
+ "epoch": 1.6814729142571347,
+ "grad_norm": 0.6019158363342285,
+ "learning_rate": 8.098598901333692e-05,
+ "loss": 1.2729,
+ "step": 1681
+ },
+ {
+ "epoch": 1.6824731955862586,
+ "grad_norm": 0.7911222577095032,
+ "learning_rate": 8.088308721742491e-05,
+ "loss": 1.3911,
+ "step": 1682
+ },
+ {
+ "epoch": 1.6834734769153825,
+ "grad_norm": 0.6759652495384216,
+ "learning_rate": 8.078020642754274e-05,
+ "loss": 1.3394,
+ "step": 1683
+ },
+ {
+ "epoch": 1.6844737582445064,
+ "grad_norm": 0.6729623675346375,
+ "learning_rate": 8.06773467567378e-05,
+ "loss": 1.2622,
+ "step": 1684
+ },
+ {
+ "epoch": 1.68547403957363,
+ "grad_norm": 0.7148420214653015,
+ "learning_rate": 8.057450831803428e-05,
+ "loss": 1.363,
+ "step": 1685
+ },
+ {
+ "epoch": 1.686474320902754,
+ "grad_norm": 0.6276561617851257,
+ "learning_rate": 8.047169122443302e-05,
+ "loss": 1.4551,
+ "step": 1686
+ },
+ {
+ "epoch": 1.6874746022318776,
+ "grad_norm": 0.6642428636550903,
+ "learning_rate": 8.036889558891142e-05,
+ "loss": 1.5254,
+ "step": 1687
+ },
+ {
+ "epoch": 1.6884748835610015,
+ "grad_norm": 0.6483539342880249,
+ "learning_rate": 8.026612152442329e-05,
+ "loss": 1.4619,
+ "step": 1688
+ },
+ {
+ "epoch": 1.6894751648901254,
+ "grad_norm": 0.799802303314209,
+ "learning_rate": 8.016336914389874e-05,
+ "loss": 1.5548,
+ "step": 1689
+ },
+ {
+ "epoch": 1.6904754462192493,
+ "grad_norm": 0.6891320943832397,
+ "learning_rate": 8.006063856024405e-05,
+ "loss": 1.3208,
+ "step": 1690
+ },
+ {
+ "epoch": 1.691475727548373,
+ "grad_norm": 0.6820452213287354,
+ "learning_rate": 7.995792988634152e-05,
+ "loss": 1.25,
+ "step": 1691
+ },
+ {
+ "epoch": 1.6924760088774968,
+ "grad_norm": 0.6455455422401428,
+ "learning_rate": 7.985524323504948e-05,
+ "loss": 1.4092,
+ "step": 1692
+ },
+ {
+ "epoch": 1.6934762902066205,
+ "grad_norm": 0.5509824156761169,
+ "learning_rate": 7.975257871920195e-05,
+ "loss": 1.2324,
+ "step": 1693
+ },
+ {
+ "epoch": 1.6944765715357444,
+ "grad_norm": 0.665798544883728,
+ "learning_rate": 7.964993645160866e-05,
+ "loss": 1.318,
+ "step": 1694
+ },
+ {
+ "epoch": 1.6954768528648683,
+ "grad_norm": 0.5200991034507751,
+ "learning_rate": 7.954731654505491e-05,
+ "loss": 1.1431,
+ "step": 1695
+ },
+ {
+ "epoch": 1.6964771341939922,
+ "grad_norm": 0.7394373416900635,
+ "learning_rate": 7.944471911230142e-05,
+ "loss": 1.5046,
+ "step": 1696
+ },
+ {
+ "epoch": 1.697477415523116,
+ "grad_norm": 0.7250887155532837,
+ "learning_rate": 7.93421442660842e-05,
+ "loss": 1.6707,
+ "step": 1697
+ },
+ {
+ "epoch": 1.6984776968522397,
+ "grad_norm": 0.6814633011817932,
+ "learning_rate": 7.923959211911449e-05,
+ "loss": 1.3375,
+ "step": 1698
+ },
+ {
+ "epoch": 1.6994779781813634,
+ "grad_norm": 0.8540093302726746,
+ "learning_rate": 7.91370627840785e-05,
+ "loss": 1.4843,
+ "step": 1699
+ },
+ {
+ "epoch": 1.7004782595104873,
+ "grad_norm": 0.596825122833252,
+ "learning_rate": 7.903455637363746e-05,
+ "loss": 1.2795,
+ "step": 1700
+ },
+ {
+ "epoch": 1.7014785408396111,
+ "grad_norm": 0.650389552116394,
+ "learning_rate": 7.89320730004274e-05,
+ "loss": 1.4164,
+ "step": 1701
+ },
+ {
+ "epoch": 1.702478822168735,
+ "grad_norm": 0.6001396179199219,
+ "learning_rate": 7.882961277705895e-05,
+ "loss": 1.3107,
+ "step": 1702
+ },
+ {
+ "epoch": 1.703479103497859,
+ "grad_norm": 0.6638504266738892,
+ "learning_rate": 7.872717581611741e-05,
+ "loss": 1.3454,
+ "step": 1703
+ },
+ {
+ "epoch": 1.7044793848269826,
+ "grad_norm": 0.6506242752075195,
+ "learning_rate": 7.862476223016246e-05,
+ "loss": 1.3095,
+ "step": 1704
+ },
+ {
+ "epoch": 1.7054796661561062,
+ "grad_norm": 0.6528734564781189,
+ "learning_rate": 7.852237213172812e-05,
+ "loss": 1.3354,
+ "step": 1705
+ },
+ {
+ "epoch": 1.7064799474852301,
+ "grad_norm": 0.6626534461975098,
+ "learning_rate": 7.842000563332254e-05,
+ "loss": 1.2478,
+ "step": 1706
+ },
+ {
+ "epoch": 1.707480228814354,
+ "grad_norm": 0.6849489212036133,
+ "learning_rate": 7.831766284742807e-05,
+ "loss": 1.4748,
+ "step": 1707
+ },
+ {
+ "epoch": 1.708480510143478,
+ "grad_norm": 0.6511324048042297,
+ "learning_rate": 7.82153438865009e-05,
+ "loss": 1.3912,
+ "step": 1708
+ },
+ {
+ "epoch": 1.7094807914726018,
+ "grad_norm": 0.610500156879425,
+ "learning_rate": 7.811304886297104e-05,
+ "loss": 1.5791,
+ "step": 1709
+ },
+ {
+ "epoch": 1.7104810728017255,
+ "grad_norm": 0.6765373945236206,
+ "learning_rate": 7.801077788924224e-05,
+ "loss": 1.3725,
+ "step": 1710
+ },
+ {
+ "epoch": 1.7114813541308493,
+ "grad_norm": 0.7581295371055603,
+ "learning_rate": 7.790853107769179e-05,
+ "loss": 1.4736,
+ "step": 1711
+ },
+ {
+ "epoch": 1.712481635459973,
+ "grad_norm": 0.7430140376091003,
+ "learning_rate": 7.780630854067045e-05,
+ "loss": 1.4055,
+ "step": 1712
+ },
+ {
+ "epoch": 1.713481916789097,
+ "grad_norm": 0.6995111107826233,
+ "learning_rate": 7.77041103905023e-05,
+ "loss": 1.4391,
+ "step": 1713
+ },
+ {
+ "epoch": 1.7144821981182208,
+ "grad_norm": 0.6253392100334167,
+ "learning_rate": 7.760193673948461e-05,
+ "loss": 1.3054,
+ "step": 1714
+ },
+ {
+ "epoch": 1.7154824794473447,
+ "grad_norm": 0.6570398211479187,
+ "learning_rate": 7.749978769988778e-05,
+ "loss": 1.328,
+ "step": 1715
+ },
+ {
+ "epoch": 1.7164827607764683,
+ "grad_norm": 0.6701228022575378,
+ "learning_rate": 7.739766338395511e-05,
+ "loss": 1.4463,
+ "step": 1716
+ },
+ {
+ "epoch": 1.7174830421055922,
+ "grad_norm": 0.6792857050895691,
+ "learning_rate": 7.729556390390275e-05,
+ "loss": 1.3868,
+ "step": 1717
+ },
+ {
+ "epoch": 1.7184833234347159,
+ "grad_norm": 0.6209396123886108,
+ "learning_rate": 7.719348937191957e-05,
+ "loss": 1.4349,
+ "step": 1718
+ },
+ {
+ "epoch": 1.7194836047638398,
+ "grad_norm": 0.6768544316291809,
+ "learning_rate": 7.709143990016702e-05,
+ "loss": 1.3351,
+ "step": 1719
+ },
+ {
+ "epoch": 1.7204838860929637,
+ "grad_norm": 0.6399651765823364,
+ "learning_rate": 7.698941560077899e-05,
+ "loss": 1.0941,
+ "step": 1720
+ },
+ {
+ "epoch": 1.7214841674220875,
+ "grad_norm": 0.727689802646637,
+ "learning_rate": 7.688741658586178e-05,
+ "loss": 1.6035,
+ "step": 1721
+ },
+ {
+ "epoch": 1.7224844487512114,
+ "grad_norm": 0.7155159711837769,
+ "learning_rate": 7.678544296749384e-05,
+ "loss": 1.3062,
+ "step": 1722
+ },
+ {
+ "epoch": 1.723484730080335,
+ "grad_norm": 0.7038417458534241,
+ "learning_rate": 7.668349485772572e-05,
+ "loss": 1.3564,
+ "step": 1723
+ },
+ {
+ "epoch": 1.7244850114094588,
+ "grad_norm": 0.6355061531066895,
+ "learning_rate": 7.658157236857999e-05,
+ "loss": 1.2889,
+ "step": 1724
+ },
+ {
+ "epoch": 1.7254852927385826,
+ "grad_norm": 0.6973921060562134,
+ "learning_rate": 7.6479675612051e-05,
+ "loss": 1.4768,
+ "step": 1725
+ },
+ {
+ "epoch": 1.7264855740677065,
+ "grad_norm": 0.6392905712127686,
+ "learning_rate": 7.637780470010487e-05,
+ "loss": 1.2648,
+ "step": 1726
+ },
+ {
+ "epoch": 1.7274858553968304,
+ "grad_norm": 0.6824831962585449,
+ "learning_rate": 7.62759597446793e-05,
+ "loss": 1.3659,
+ "step": 1727
+ },
+ {
+ "epoch": 1.7284861367259543,
+ "grad_norm": 0.6261188983917236,
+ "learning_rate": 7.617414085768351e-05,
+ "loss": 1.1169,
+ "step": 1728
+ },
+ {
+ "epoch": 1.729486418055078,
+ "grad_norm": 0.749819815158844,
+ "learning_rate": 7.607234815099802e-05,
+ "loss": 1.5689,
+ "step": 1729
+ },
+ {
+ "epoch": 1.7304866993842019,
+ "grad_norm": 0.6438270211219788,
+ "learning_rate": 7.597058173647458e-05,
+ "loss": 1.5213,
+ "step": 1730
+ },
+ {
+ "epoch": 1.7314869807133255,
+ "grad_norm": 0.6761091351509094,
+ "learning_rate": 7.586884172593609e-05,
+ "loss": 1.4573,
+ "step": 1731
+ },
+ {
+ "epoch": 1.7324872620424494,
+ "grad_norm": 0.7616665363311768,
+ "learning_rate": 7.576712823117645e-05,
+ "loss": 1.362,
+ "step": 1732
+ },
+ {
+ "epoch": 1.7334875433715733,
+ "grad_norm": 0.6882945895195007,
+ "learning_rate": 7.566544136396037e-05,
+ "loss": 1.3095,
+ "step": 1733
+ },
+ {
+ "epoch": 1.7344878247006972,
+ "grad_norm": 0.6496737003326416,
+ "learning_rate": 7.556378123602334e-05,
+ "loss": 1.7484,
+ "step": 1734
+ },
+ {
+ "epoch": 1.7354881060298208,
+ "grad_norm": 0.5761104822158813,
+ "learning_rate": 7.54621479590714e-05,
+ "loss": 1.1494,
+ "step": 1735
+ },
+ {
+ "epoch": 1.7364883873589447,
+ "grad_norm": 0.5761191248893738,
+ "learning_rate": 7.536054164478123e-05,
+ "loss": 1.235,
+ "step": 1736
+ },
+ {
+ "epoch": 1.7374886686880684,
+ "grad_norm": 0.6114856600761414,
+ "learning_rate": 7.525896240479976e-05,
+ "loss": 1.2359,
+ "step": 1737
+ },
+ {
+ "epoch": 1.7384889500171923,
+ "grad_norm": 0.6941234469413757,
+ "learning_rate": 7.51574103507442e-05,
+ "loss": 1.2464,
+ "step": 1738
+ },
+ {
+ "epoch": 1.7394892313463162,
+ "grad_norm": 0.8174465298652649,
+ "learning_rate": 7.505588559420189e-05,
+ "loss": 1.4083,
+ "step": 1739
+ },
+ {
+ "epoch": 1.74048951267544,
+ "grad_norm": 0.6795939207077026,
+ "learning_rate": 7.495438824673016e-05,
+ "loss": 1.1648,
+ "step": 1740
+ },
+ {
+ "epoch": 1.741489794004564,
+ "grad_norm": 0.7111831307411194,
+ "learning_rate": 7.485291841985626e-05,
+ "loss": 1.4835,
+ "step": 1741
+ },
+ {
+ "epoch": 1.7424900753336876,
+ "grad_norm": 0.6238039135932922,
+ "learning_rate": 7.475147622507717e-05,
+ "loss": 1.1049,
+ "step": 1742
+ },
+ {
+ "epoch": 1.7434903566628113,
+ "grad_norm": 0.6977236270904541,
+ "learning_rate": 7.465006177385953e-05,
+ "loss": 1.2669,
+ "step": 1743
+ },
+ {
+ "epoch": 1.7444906379919352,
+ "grad_norm": 0.5952944159507751,
+ "learning_rate": 7.454867517763948e-05,
+ "loss": 1.3034,
+ "step": 1744
+ },
+ {
+ "epoch": 1.745490919321059,
+ "grad_norm": 0.6380055546760559,
+ "learning_rate": 7.444731654782253e-05,
+ "loss": 1.348,
+ "step": 1745
+ },
+ {
+ "epoch": 1.746491200650183,
+ "grad_norm": 0.7760757207870483,
+ "learning_rate": 7.434598599578351e-05,
+ "loss": 1.4662,
+ "step": 1746
+ },
+ {
+ "epoch": 1.7474914819793068,
+ "grad_norm": 0.6411606073379517,
+ "learning_rate": 7.424468363286634e-05,
+ "loss": 1.3775,
+ "step": 1747
+ },
+ {
+ "epoch": 1.7484917633084305,
+ "grad_norm": 0.6461936235427856,
+ "learning_rate": 7.414340957038406e-05,
+ "loss": 1.53,
+ "step": 1748
+ },
+ {
+ "epoch": 1.7494920446375544,
+ "grad_norm": 0.6881558299064636,
+ "learning_rate": 7.404216391961847e-05,
+ "loss": 1.2593,
+ "step": 1749
+ },
+ {
+ "epoch": 1.750492325966678,
+ "grad_norm": 0.6212877631187439,
+ "learning_rate": 7.394094679182024e-05,
+ "loss": 1.3894,
+ "step": 1750
+ },
+ {
+ "epoch": 1.751492607295802,
+ "grad_norm": 0.6928493976593018,
+ "learning_rate": 7.383975829820874e-05,
+ "loss": 1.4044,
+ "step": 1751
+ },
+ {
+ "epoch": 1.7524928886249258,
+ "grad_norm": 0.8617000579833984,
+ "learning_rate": 7.37385985499718e-05,
+ "loss": 1.6135,
+ "step": 1752
+ },
+ {
+ "epoch": 1.7534931699540497,
+ "grad_norm": 0.5414397716522217,
+ "learning_rate": 7.36374676582657e-05,
+ "loss": 1.2033,
+ "step": 1753
+ },
+ {
+ "epoch": 1.7544934512831734,
+ "grad_norm": 0.6777795553207397,
+ "learning_rate": 7.353636573421496e-05,
+ "loss": 1.2382,
+ "step": 1754
+ },
+ {
+ "epoch": 1.7554937326122972,
+ "grad_norm": 0.6595777273178101,
+ "learning_rate": 7.343529288891239e-05,
+ "loss": 1.3224,
+ "step": 1755
+ },
+ {
+ "epoch": 1.756494013941421,
+ "grad_norm": 0.6468746066093445,
+ "learning_rate": 7.333424923341868e-05,
+ "loss": 1.3266,
+ "step": 1756
+ },
+ {
+ "epoch": 1.7574942952705448,
+ "grad_norm": 0.6333887577056885,
+ "learning_rate": 7.323323487876257e-05,
+ "loss": 1.2204,
+ "step": 1757
+ },
+ {
+ "epoch": 1.7584945765996687,
+ "grad_norm": 0.6916175484657288,
+ "learning_rate": 7.313224993594057e-05,
+ "loss": 1.2936,
+ "step": 1758
+ },
+ {
+ "epoch": 1.7594948579287926,
+ "grad_norm": 0.6844228506088257,
+ "learning_rate": 7.303129451591686e-05,
+ "loss": 1.3675,
+ "step": 1759
+ },
+ {
+ "epoch": 1.7604951392579165,
+ "grad_norm": 0.774632453918457,
+ "learning_rate": 7.29303687296232e-05,
+ "loss": 1.4389,
+ "step": 1760
+ },
+ {
+ "epoch": 1.7614954205870401,
+ "grad_norm": 0.648438572883606,
+ "learning_rate": 7.282947268795877e-05,
+ "loss": 1.3824,
+ "step": 1761
+ },
+ {
+ "epoch": 1.7624957019161638,
+ "grad_norm": 0.6045622229576111,
+ "learning_rate": 7.272860650179006e-05,
+ "loss": 1.5047,
+ "step": 1762
+ },
+ {
+ "epoch": 1.7634959832452877,
+ "grad_norm": 0.5664568543434143,
+ "learning_rate": 7.262777028195081e-05,
+ "loss": 1.2122,
+ "step": 1763
+ },
+ {
+ "epoch": 1.7644962645744116,
+ "grad_norm": 0.6108324527740479,
+ "learning_rate": 7.252696413924174e-05,
+ "loss": 1.1857,
+ "step": 1764
+ },
+ {
+ "epoch": 1.7654965459035354,
+ "grad_norm": 0.6443206667900085,
+ "learning_rate": 7.242618818443056e-05,
+ "loss": 1.3842,
+ "step": 1765
+ },
+ {
+ "epoch": 1.7664968272326593,
+ "grad_norm": 0.7583184242248535,
+ "learning_rate": 7.232544252825189e-05,
+ "loss": 1.2639,
+ "step": 1766
+ },
+ {
+ "epoch": 1.767497108561783,
+ "grad_norm": 1.4318912029266357,
+ "learning_rate": 7.222472728140695e-05,
+ "loss": 1.0818,
+ "step": 1767
+ },
+ {
+ "epoch": 1.7684973898909067,
+ "grad_norm": 0.7584578394889832,
+ "learning_rate": 7.212404255456357e-05,
+ "loss": 1.3726,
+ "step": 1768
+ },
+ {
+ "epoch": 1.7694976712200305,
+ "grad_norm": 0.6667199730873108,
+ "learning_rate": 7.202338845835606e-05,
+ "loss": 1.4032,
+ "step": 1769
+ },
+ {
+ "epoch": 1.7704979525491544,
+ "grad_norm": 0.6628596782684326,
+ "learning_rate": 7.192276510338507e-05,
+ "loss": 1.3554,
+ "step": 1770
+ },
+ {
+ "epoch": 1.7714982338782783,
+ "grad_norm": 0.6599582433700562,
+ "learning_rate": 7.182217260021749e-05,
+ "loss": 1.4631,
+ "step": 1771
+ },
+ {
+ "epoch": 1.7724985152074022,
+ "grad_norm": 0.6616823077201843,
+ "learning_rate": 7.172161105938624e-05,
+ "loss": 1.2651,
+ "step": 1772
+ },
+ {
+ "epoch": 1.7734987965365259,
+ "grad_norm": 0.7346659898757935,
+ "learning_rate": 7.162108059139032e-05,
+ "loss": 1.2884,
+ "step": 1773
+ },
+ {
+ "epoch": 1.7744990778656498,
+ "grad_norm": 0.7243573069572449,
+ "learning_rate": 7.15205813066945e-05,
+ "loss": 1.5171,
+ "step": 1774
+ },
+ {
+ "epoch": 1.7754993591947734,
+ "grad_norm": 0.7012961506843567,
+ "learning_rate": 7.142011331572936e-05,
+ "loss": 1.3929,
+ "step": 1775
+ },
+ {
+ "epoch": 1.7764996405238973,
+ "grad_norm": 0.7035613059997559,
+ "learning_rate": 7.131967672889101e-05,
+ "loss": 1.3625,
+ "step": 1776
+ },
+ {
+ "epoch": 1.7774999218530212,
+ "grad_norm": 0.6865414977073669,
+ "learning_rate": 7.121927165654109e-05,
+ "loss": 1.34,
+ "step": 1777
+ },
+ {
+ "epoch": 1.778500203182145,
+ "grad_norm": 0.6221490502357483,
+ "learning_rate": 7.111889820900664e-05,
+ "loss": 1.4009,
+ "step": 1778
+ },
+ {
+ "epoch": 1.7795004845112687,
+ "grad_norm": 0.7319238185882568,
+ "learning_rate": 7.101855649657991e-05,
+ "loss": 1.4471,
+ "step": 1779
+ },
+ {
+ "epoch": 1.7805007658403926,
+ "grad_norm": 0.6840189695358276,
+ "learning_rate": 7.091824662951827e-05,
+ "loss": 1.256,
+ "step": 1780
+ },
+ {
+ "epoch": 1.7815010471695163,
+ "grad_norm": 0.6095514297485352,
+ "learning_rate": 7.08179687180442e-05,
+ "loss": 1.2354,
+ "step": 1781
+ },
+ {
+ "epoch": 1.7825013284986402,
+ "grad_norm": 0.6160232424736023,
+ "learning_rate": 7.071772287234497e-05,
+ "loss": 1.2264,
+ "step": 1782
+ },
+ {
+ "epoch": 1.783501609827764,
+ "grad_norm": 0.8078712224960327,
+ "learning_rate": 7.06175092025726e-05,
+ "loss": 1.5183,
+ "step": 1783
+ },
+ {
+ "epoch": 1.784501891156888,
+ "grad_norm": 0.7782847881317139,
+ "learning_rate": 7.051732781884378e-05,
+ "loss": 1.3455,
+ "step": 1784
+ },
+ {
+ "epoch": 1.7855021724860118,
+ "grad_norm": 0.6435388326644897,
+ "learning_rate": 7.041717883123977e-05,
+ "loss": 1.3738,
+ "step": 1785
+ },
+ {
+ "epoch": 1.7865024538151355,
+ "grad_norm": 0.6329066157341003,
+ "learning_rate": 7.031706234980617e-05,
+ "loss": 1.3574,
+ "step": 1786
+ },
+ {
+ "epoch": 1.7875027351442592,
+ "grad_norm": 0.7085026502609253,
+ "learning_rate": 7.021697848455291e-05,
+ "loss": 1.4758,
+ "step": 1787
+ },
+ {
+ "epoch": 1.788503016473383,
+ "grad_norm": 0.6369369029998779,
+ "learning_rate": 7.011692734545403e-05,
+ "loss": 1.4166,
+ "step": 1788
+ },
+ {
+ "epoch": 1.789503297802507,
+ "grad_norm": 0.5777859687805176,
+ "learning_rate": 7.001690904244767e-05,
+ "loss": 1.2348,
+ "step": 1789
+ },
+ {
+ "epoch": 1.7905035791316308,
+ "grad_norm": 0.6327416896820068,
+ "learning_rate": 6.991692368543584e-05,
+ "loss": 1.3229,
+ "step": 1790
+ },
+ {
+ "epoch": 1.7915038604607547,
+ "grad_norm": 0.6739888191223145,
+ "learning_rate": 6.981697138428434e-05,
+ "loss": 1.3607,
+ "step": 1791
+ },
+ {
+ "epoch": 1.7925041417898784,
+ "grad_norm": 0.6856040954589844,
+ "learning_rate": 6.971705224882271e-05,
+ "loss": 1.3359,
+ "step": 1792
+ },
+ {
+ "epoch": 1.7935044231190023,
+ "grad_norm": 0.6209323406219482,
+ "learning_rate": 6.9617166388844e-05,
+ "loss": 1.1944,
+ "step": 1793
+ },
+ {
+ "epoch": 1.794504704448126,
+ "grad_norm": 0.6018400192260742,
+ "learning_rate": 6.951731391410468e-05,
+ "loss": 1.2403,
+ "step": 1794
+ },
+ {
+ "epoch": 1.7955049857772498,
+ "grad_norm": 0.7714535593986511,
+ "learning_rate": 6.94174949343246e-05,
+ "loss": 1.6654,
+ "step": 1795
+ },
+ {
+ "epoch": 1.7965052671063737,
+ "grad_norm": 0.6726022362709045,
+ "learning_rate": 6.931770955918674e-05,
+ "loss": 1.5234,
+ "step": 1796
+ },
+ {
+ "epoch": 1.7975055484354976,
+ "grad_norm": 0.6442826390266418,
+ "learning_rate": 6.921795789833723e-05,
+ "loss": 1.4253,
+ "step": 1797
+ },
+ {
+ "epoch": 1.7985058297646213,
+ "grad_norm": 0.6518099904060364,
+ "learning_rate": 6.911824006138503e-05,
+ "loss": 1.2595,
+ "step": 1798
+ },
+ {
+ "epoch": 1.7995061110937451,
+ "grad_norm": 0.6912478804588318,
+ "learning_rate": 6.901855615790206e-05,
+ "loss": 1.2241,
+ "step": 1799
+ },
+ {
+ "epoch": 1.8005063924228688,
+ "grad_norm": 0.7709338068962097,
+ "learning_rate": 6.891890629742288e-05,
+ "loss": 1.492,
+ "step": 1800
+ },
+ {
+ "epoch": 1.8015066737519927,
+ "grad_norm": 0.7332537770271301,
+ "learning_rate": 6.88192905894447e-05,
+ "loss": 1.2449,
+ "step": 1801
+ },
+ {
+ "epoch": 1.8025069550811166,
+ "grad_norm": 0.6881892681121826,
+ "learning_rate": 6.871970914342712e-05,
+ "loss": 1.5265,
+ "step": 1802
+ },
+ {
+ "epoch": 1.8035072364102405,
+ "grad_norm": 0.6630619764328003,
+ "learning_rate": 6.862016206879216e-05,
+ "loss": 1.4283,
+ "step": 1803
+ },
+ {
+ "epoch": 1.8045075177393644,
+ "grad_norm": 0.6601552367210388,
+ "learning_rate": 6.852064947492405e-05,
+ "loss": 1.296,
+ "step": 1804
+ },
+ {
+ "epoch": 1.805507799068488,
+ "grad_norm": 0.7896683216094971,
+ "learning_rate": 6.842117147116913e-05,
+ "loss": 1.3608,
+ "step": 1805
+ },
+ {
+ "epoch": 1.8065080803976117,
+ "grad_norm": 0.6206938624382019,
+ "learning_rate": 6.832172816683575e-05,
+ "loss": 1.2986,
+ "step": 1806
+ },
+ {
+ "epoch": 1.8075083617267356,
+ "grad_norm": 0.6818989515304565,
+ "learning_rate": 6.82223196711941e-05,
+ "loss": 1.3181,
+ "step": 1807
+ },
+ {
+ "epoch": 1.8085086430558595,
+ "grad_norm": 0.716175377368927,
+ "learning_rate": 6.812294609347615e-05,
+ "loss": 1.3468,
+ "step": 1808
+ },
+ {
+ "epoch": 1.8095089243849833,
+ "grad_norm": 0.6611238718032837,
+ "learning_rate": 6.802360754287547e-05,
+ "loss": 1.3609,
+ "step": 1809
+ },
+ {
+ "epoch": 1.8105092057141072,
+ "grad_norm": 0.637792706489563,
+ "learning_rate": 6.79243041285472e-05,
+ "loss": 1.3175,
+ "step": 1810
+ },
+ {
+ "epoch": 1.811509487043231,
+ "grad_norm": 0.6208909749984741,
+ "learning_rate": 6.782503595960782e-05,
+ "loss": 1.5203,
+ "step": 1811
+ },
+ {
+ "epoch": 1.8125097683723548,
+ "grad_norm": 0.8295003175735474,
+ "learning_rate": 6.772580314513508e-05,
+ "loss": 1.4409,
+ "step": 1812
+ },
+ {
+ "epoch": 1.8135100497014784,
+ "grad_norm": 0.6259089708328247,
+ "learning_rate": 6.762660579416791e-05,
+ "loss": 1.5972,
+ "step": 1813
+ },
+ {
+ "epoch": 1.8145103310306023,
+ "grad_norm": 0.6317711472511292,
+ "learning_rate": 6.752744401570625e-05,
+ "loss": 1.4977,
+ "step": 1814
+ },
+ {
+ "epoch": 1.8155106123597262,
+ "grad_norm": 0.6420071721076965,
+ "learning_rate": 6.742831791871096e-05,
+ "loss": 1.2393,
+ "step": 1815
+ },
+ {
+ "epoch": 1.81651089368885,
+ "grad_norm": 1.0690205097198486,
+ "learning_rate": 6.732922761210369e-05,
+ "loss": 1.1627,
+ "step": 1816
+ },
+ {
+ "epoch": 1.8175111750179738,
+ "grad_norm": 0.817659854888916,
+ "learning_rate": 6.723017320476679e-05,
+ "loss": 1.2888,
+ "step": 1817
+ },
+ {
+ "epoch": 1.8185114563470977,
+ "grad_norm": 0.5951409339904785,
+ "learning_rate": 6.713115480554313e-05,
+ "loss": 1.4495,
+ "step": 1818
+ },
+ {
+ "epoch": 1.8195117376762213,
+ "grad_norm": 0.6608357429504395,
+ "learning_rate": 6.7032172523236e-05,
+ "loss": 1.5617,
+ "step": 1819
+ },
+ {
+ "epoch": 1.8205120190053452,
+ "grad_norm": 0.5993407368659973,
+ "learning_rate": 6.693322646660906e-05,
+ "loss": 1.4812,
+ "step": 1820
+ },
+ {
+ "epoch": 1.821512300334469,
+ "grad_norm": 0.6179289817810059,
+ "learning_rate": 6.683431674438612e-05,
+ "loss": 1.2661,
+ "step": 1821
+ },
+ {
+ "epoch": 1.822512581663593,
+ "grad_norm": 0.6814618110656738,
+ "learning_rate": 6.673544346525107e-05,
+ "loss": 1.5599,
+ "step": 1822
+ },
+ {
+ "epoch": 1.8235128629927169,
+ "grad_norm": 0.7015717029571533,
+ "learning_rate": 6.663660673784777e-05,
+ "loss": 1.5481,
+ "step": 1823
+ },
+ {
+ "epoch": 1.8245131443218405,
+ "grad_norm": 0.6974764466285706,
+ "learning_rate": 6.653780667077985e-05,
+ "loss": 1.3893,
+ "step": 1824
+ },
+ {
+ "epoch": 1.8255134256509642,
+ "grad_norm": 0.614414393901825,
+ "learning_rate": 6.643904337261082e-05,
+ "loss": 1.4293,
+ "step": 1825
+ },
+ {
+ "epoch": 1.826513706980088,
+ "grad_norm": 0.6676987409591675,
+ "learning_rate": 6.634031695186362e-05,
+ "loss": 1.2622,
+ "step": 1826
+ },
+ {
+ "epoch": 1.827513988309212,
+ "grad_norm": 0.689335823059082,
+ "learning_rate": 6.624162751702076e-05,
+ "loss": 1.2908,
+ "step": 1827
+ },
+ {
+ "epoch": 1.8285142696383359,
+ "grad_norm": 0.6336010098457336,
+ "learning_rate": 6.614297517652409e-05,
+ "loss": 1.434,
+ "step": 1828
+ },
+ {
+ "epoch": 1.8295145509674597,
+ "grad_norm": 0.549472987651825,
+ "learning_rate": 6.604436003877464e-05,
+ "loss": 1.2401,
+ "step": 1829
+ },
+ {
+ "epoch": 1.8305148322965834,
+ "grad_norm": 0.6261748671531677,
+ "learning_rate": 6.594578221213265e-05,
+ "loss": 1.4202,
+ "step": 1830
+ },
+ {
+ "epoch": 1.831515113625707,
+ "grad_norm": 0.6431935429573059,
+ "learning_rate": 6.58472418049173e-05,
+ "loss": 1.4625,
+ "step": 1831
+ },
+ {
+ "epoch": 1.832515394954831,
+ "grad_norm": 0.726426362991333,
+ "learning_rate": 6.574873892540671e-05,
+ "loss": 1.4571,
+ "step": 1832
+ },
+ {
+ "epoch": 1.8335156762839548,
+ "grad_norm": 0.6399835348129272,
+ "learning_rate": 6.565027368183769e-05,
+ "loss": 1.5148,
+ "step": 1833
+ },
+ {
+ "epoch": 1.8345159576130787,
+ "grad_norm": 0.6948434114456177,
+ "learning_rate": 6.555184618240577e-05,
+ "loss": 1.3199,
+ "step": 1834
+ },
+ {
+ "epoch": 1.8355162389422026,
+ "grad_norm": 0.8539021611213684,
+ "learning_rate": 6.545345653526495e-05,
+ "loss": 1.4847,
+ "step": 1835
+ },
+ {
+ "epoch": 1.8365165202713263,
+ "grad_norm": 0.6419363617897034,
+ "learning_rate": 6.535510484852767e-05,
+ "loss": 1.4148,
+ "step": 1836
+ },
+ {
+ "epoch": 1.8375168016004502,
+ "grad_norm": 0.6716374158859253,
+ "learning_rate": 6.525679123026463e-05,
+ "loss": 1.3921,
+ "step": 1837
+ },
+ {
+ "epoch": 1.8385170829295738,
+ "grad_norm": 0.6869264841079712,
+ "learning_rate": 6.515851578850474e-05,
+ "loss": 1.5427,
+ "step": 1838
+ },
+ {
+ "epoch": 1.8395173642586977,
+ "grad_norm": 0.6462099552154541,
+ "learning_rate": 6.506027863123492e-05,
+ "loss": 1.44,
+ "step": 1839
+ },
+ {
+ "epoch": 1.8405176455878216,
+ "grad_norm": 0.7004585266113281,
+ "learning_rate": 6.496207986640004e-05,
+ "loss": 1.352,
+ "step": 1840
+ },
+ {
+ "epoch": 1.8415179269169455,
+ "grad_norm": 0.6404154896736145,
+ "learning_rate": 6.48639196019028e-05,
+ "loss": 1.309,
+ "step": 1841
+ },
+ {
+ "epoch": 1.8425182082460692,
+ "grad_norm": 0.6515551209449768,
+ "learning_rate": 6.476579794560356e-05,
+ "loss": 1.1967,
+ "step": 1842
+ },
+ {
+ "epoch": 1.843518489575193,
+ "grad_norm": 0.6698777675628662,
+ "learning_rate": 6.46677150053203e-05,
+ "loss": 1.4054,
+ "step": 1843
+ },
+ {
+ "epoch": 1.8445187709043167,
+ "grad_norm": 0.5269995927810669,
+ "learning_rate": 6.45696708888284e-05,
+ "loss": 1.3496,
+ "step": 1844
+ },
+ {
+ "epoch": 1.8455190522334406,
+ "grad_norm": 0.7120770215988159,
+ "learning_rate": 6.447166570386063e-05,
+ "loss": 1.449,
+ "step": 1845
+ },
+ {
+ "epoch": 1.8465193335625645,
+ "grad_norm": 0.6120896935462952,
+ "learning_rate": 6.437369955810699e-05,
+ "loss": 1.3471,
+ "step": 1846
+ },
+ {
+ "epoch": 1.8475196148916884,
+ "grad_norm": 0.7216696739196777,
+ "learning_rate": 6.42757725592145e-05,
+ "loss": 1.681,
+ "step": 1847
+ },
+ {
+ "epoch": 1.8485198962208123,
+ "grad_norm": 0.7460939288139343,
+ "learning_rate": 6.417788481478728e-05,
+ "loss": 1.3102,
+ "step": 1848
+ },
+ {
+ "epoch": 1.849520177549936,
+ "grad_norm": 0.6691422462463379,
+ "learning_rate": 6.40800364323862e-05,
+ "loss": 1.2691,
+ "step": 1849
+ },
+ {
+ "epoch": 1.8505204588790596,
+ "grad_norm": 0.5813978314399719,
+ "learning_rate": 6.398222751952899e-05,
+ "loss": 1.3182,
+ "step": 1850
+ },
+ {
+ "epoch": 1.8515207402081835,
+ "grad_norm": 0.7524621486663818,
+ "learning_rate": 6.388445818368991e-05,
+ "loss": 1.2682,
+ "step": 1851
+ },
+ {
+ "epoch": 1.8525210215373074,
+ "grad_norm": 0.6798551678657532,
+ "learning_rate": 6.378672853229981e-05,
+ "loss": 1.3795,
+ "step": 1852
+ },
+ {
+ "epoch": 1.8535213028664312,
+ "grad_norm": 0.6338953971862793,
+ "learning_rate": 6.368903867274585e-05,
+ "loss": 1.3801,
+ "step": 1853
+ },
+ {
+ "epoch": 1.8545215841955551,
+ "grad_norm": 0.6461024880409241,
+ "learning_rate": 6.35913887123716e-05,
+ "loss": 1.3359,
+ "step": 1854
+ },
+ {
+ "epoch": 1.8555218655246788,
+ "grad_norm": 0.7232131958007812,
+ "learning_rate": 6.34937787584767e-05,
+ "loss": 1.4629,
+ "step": 1855
+ },
+ {
+ "epoch": 1.8565221468538027,
+ "grad_norm": 0.6489596366882324,
+ "learning_rate": 6.339620891831678e-05,
+ "loss": 1.2078,
+ "step": 1856
+ },
+ {
+ "epoch": 1.8575224281829263,
+ "grad_norm": 0.5821114778518677,
+ "learning_rate": 6.329867929910347e-05,
+ "loss": 1.5002,
+ "step": 1857
+ },
+ {
+ "epoch": 1.8585227095120502,
+ "grad_norm": 0.5860056281089783,
+ "learning_rate": 6.32011900080042e-05,
+ "loss": 1.1737,
+ "step": 1858
+ },
+ {
+ "epoch": 1.8595229908411741,
+ "grad_norm": 0.5989000797271729,
+ "learning_rate": 6.310374115214204e-05,
+ "loss": 1.3945,
+ "step": 1859
+ },
+ {
+ "epoch": 1.860523272170298,
+ "grad_norm": 0.7010142803192139,
+ "learning_rate": 6.30063328385957e-05,
+ "loss": 1.4431,
+ "step": 1860
+ },
+ {
+ "epoch": 1.8615235534994217,
+ "grad_norm": 0.5994375348091125,
+ "learning_rate": 6.290896517439925e-05,
+ "loss": 1.5378,
+ "step": 1861
+ },
+ {
+ "epoch": 1.8625238348285456,
+ "grad_norm": 0.7813047170639038,
+ "learning_rate": 6.281163826654218e-05,
+ "loss": 1.4424,
+ "step": 1862
+ },
+ {
+ "epoch": 1.8635241161576692,
+ "grad_norm": 0.8812029361724854,
+ "learning_rate": 6.271435222196916e-05,
+ "loss": 1.4378,
+ "step": 1863
+ },
+ {
+ "epoch": 1.864524397486793,
+ "grad_norm": 0.7037883400917053,
+ "learning_rate": 6.261710714757994e-05,
+ "loss": 1.5074,
+ "step": 1864
+ },
+ {
+ "epoch": 1.865524678815917,
+ "grad_norm": 0.6375555396080017,
+ "learning_rate": 6.251990315022927e-05,
+ "loss": 1.5777,
+ "step": 1865
+ },
+ {
+ "epoch": 1.8665249601450409,
+ "grad_norm": 0.7071056365966797,
+ "learning_rate": 6.24227403367268e-05,
+ "loss": 1.5755,
+ "step": 1866
+ },
+ {
+ "epoch": 1.8675252414741648,
+ "grad_norm": 0.69529789686203,
+ "learning_rate": 6.232561881383687e-05,
+ "loss": 1.5843,
+ "step": 1867
+ },
+ {
+ "epoch": 1.8685255228032884,
+ "grad_norm": 0.7598209977149963,
+ "learning_rate": 6.222853868827839e-05,
+ "loss": 1.3657,
+ "step": 1868
+ },
+ {
+ "epoch": 1.869525804132412,
+ "grad_norm": 0.6517311930656433,
+ "learning_rate": 6.213150006672499e-05,
+ "loss": 1.5294,
+ "step": 1869
+ },
+ {
+ "epoch": 1.870526085461536,
+ "grad_norm": 0.5940656065940857,
+ "learning_rate": 6.20345030558045e-05,
+ "loss": 1.4061,
+ "step": 1870
+ },
+ {
+ "epoch": 1.8715263667906599,
+ "grad_norm": 0.6668642163276672,
+ "learning_rate": 6.193754776209911e-05,
+ "loss": 1.339,
+ "step": 1871
+ },
+ {
+ "epoch": 1.8725266481197838,
+ "grad_norm": 0.6001901626586914,
+ "learning_rate": 6.184063429214515e-05,
+ "loss": 1.2836,
+ "step": 1872
+ },
+ {
+ "epoch": 1.8735269294489076,
+ "grad_norm": 0.703733503818512,
+ "learning_rate": 6.174376275243299e-05,
+ "loss": 1.3441,
+ "step": 1873
+ },
+ {
+ "epoch": 1.8745272107780313,
+ "grad_norm": 0.5734414458274841,
+ "learning_rate": 6.164693324940694e-05,
+ "loss": 1.09,
+ "step": 1874
+ },
+ {
+ "epoch": 1.8755274921071552,
+ "grad_norm": 0.6441298127174377,
+ "learning_rate": 6.15501458894651e-05,
+ "loss": 1.2849,
+ "step": 1875
+ },
+ {
+ "epoch": 1.8765277734362789,
+ "grad_norm": 0.6523350477218628,
+ "learning_rate": 6.145340077895929e-05,
+ "loss": 1.4681,
+ "step": 1876
+ },
+ {
+ "epoch": 1.8775280547654027,
+ "grad_norm": 0.6061530113220215,
+ "learning_rate": 6.135669802419488e-05,
+ "loss": 1.3961,
+ "step": 1877
+ },
+ {
+ "epoch": 1.8785283360945266,
+ "grad_norm": 0.7068478465080261,
+ "learning_rate": 6.126003773143072e-05,
+ "loss": 1.2469,
+ "step": 1878
+ },
+ {
+ "epoch": 1.8795286174236505,
+ "grad_norm": 0.6139722466468811,
+ "learning_rate": 6.116342000687896e-05,
+ "loss": 1.4774,
+ "step": 1879
+ },
+ {
+ "epoch": 1.8805288987527742,
+ "grad_norm": 0.7666826248168945,
+ "learning_rate": 6.106684495670506e-05,
+ "loss": 1.4634,
+ "step": 1880
+ },
+ {
+ "epoch": 1.881529180081898,
+ "grad_norm": 0.5872985124588013,
+ "learning_rate": 6.097031268702746e-05,
+ "loss": 1.2302,
+ "step": 1881
+ },
+ {
+ "epoch": 1.8825294614110217,
+ "grad_norm": 0.6170175075531006,
+ "learning_rate": 6.087382330391774e-05,
+ "loss": 1.0561,
+ "step": 1882
+ },
+ {
+ "epoch": 1.8835297427401456,
+ "grad_norm": 0.7397921085357666,
+ "learning_rate": 6.077737691340023e-05,
+ "loss": 1.3574,
+ "step": 1883
+ },
+ {
+ "epoch": 1.8845300240692695,
+ "grad_norm": 0.5703612565994263,
+ "learning_rate": 6.0680973621452105e-05,
+ "loss": 1.2651,
+ "step": 1884
+ },
+ {
+ "epoch": 1.8855303053983934,
+ "grad_norm": 0.6688309907913208,
+ "learning_rate": 6.0584613534003144e-05,
+ "loss": 1.5202,
+ "step": 1885
+ },
+ {
+ "epoch": 1.8865305867275173,
+ "grad_norm": 0.6952941417694092,
+ "learning_rate": 6.0488296756935636e-05,
+ "loss": 1.3407,
+ "step": 1886
+ },
+ {
+ "epoch": 1.887530868056641,
+ "grad_norm": 0.6440621018409729,
+ "learning_rate": 6.039202339608432e-05,
+ "loss": 1.3836,
+ "step": 1887
+ },
+ {
+ "epoch": 1.8885311493857646,
+ "grad_norm": 0.7453868985176086,
+ "learning_rate": 6.0295793557236203e-05,
+ "loss": 1.3744,
+ "step": 1888
+ },
+ {
+ "epoch": 1.8895314307148885,
+ "grad_norm": 0.5936272740364075,
+ "learning_rate": 6.019960734613047e-05,
+ "loss": 1.2957,
+ "step": 1889
+ },
+ {
+ "epoch": 1.8905317120440124,
+ "grad_norm": 0.6652967929840088,
+ "learning_rate": 6.010346486845837e-05,
+ "loss": 1.3191,
+ "step": 1890
+ },
+ {
+ "epoch": 1.8915319933731363,
+ "grad_norm": 0.5736771821975708,
+ "learning_rate": 6.0007366229863117e-05,
+ "loss": 1.2785,
+ "step": 1891
+ },
+ {
+ "epoch": 1.8925322747022602,
+ "grad_norm": 0.6693833470344543,
+ "learning_rate": 5.991131153593971e-05,
+ "loss": 1.2598,
+ "step": 1892
+ },
+ {
+ "epoch": 1.8935325560313838,
+ "grad_norm": 0.6824096441268921,
+ "learning_rate": 5.981530089223489e-05,
+ "loss": 1.6205,
+ "step": 1893
+ },
+ {
+ "epoch": 1.8945328373605075,
+ "grad_norm": 0.6346132159233093,
+ "learning_rate": 5.971933440424703e-05,
+ "loss": 1.2442,
+ "step": 1894
+ },
+ {
+ "epoch": 1.8955331186896314,
+ "grad_norm": 0.6125045418739319,
+ "learning_rate": 5.9623412177425886e-05,
+ "loss": 1.3326,
+ "step": 1895
+ },
+ {
+ "epoch": 1.8965334000187553,
+ "grad_norm": 0.5784642696380615,
+ "learning_rate": 5.952753431717268e-05,
+ "loss": 1.2986,
+ "step": 1896
+ },
+ {
+ "epoch": 1.8975336813478791,
+ "grad_norm": 0.6546468138694763,
+ "learning_rate": 5.9431700928839805e-05,
+ "loss": 1.2985,
+ "step": 1897
+ },
+ {
+ "epoch": 1.898533962677003,
+ "grad_norm": 0.6771805286407471,
+ "learning_rate": 5.933591211773082e-05,
+ "loss": 1.3101,
+ "step": 1898
+ },
+ {
+ "epoch": 1.8995342440061267,
+ "grad_norm": 0.672447681427002,
+ "learning_rate": 5.924016798910037e-05,
+ "loss": 1.3677,
+ "step": 1899
+ },
+ {
+ "epoch": 1.9005345253352506,
+ "grad_norm": 0.6948104500770569,
+ "learning_rate": 5.914446864815388e-05,
+ "loss": 1.2893,
+ "step": 1900
+ },
+ {
+ "epoch": 1.9015348066643742,
+ "grad_norm": 0.6217272877693176,
+ "learning_rate": 5.9048814200047675e-05,
+ "loss": 1.3604,
+ "step": 1901
+ },
+ {
+ "epoch": 1.9025350879934981,
+ "grad_norm": 0.7615220546722412,
+ "learning_rate": 5.895320474988864e-05,
+ "loss": 1.4518,
+ "step": 1902
+ },
+ {
+ "epoch": 1.903535369322622,
+ "grad_norm": 0.6633756160736084,
+ "learning_rate": 5.885764040273426e-05,
+ "loss": 1.4675,
+ "step": 1903
+ },
+ {
+ "epoch": 1.904535650651746,
+ "grad_norm": 0.877419114112854,
+ "learning_rate": 5.876212126359251e-05,
+ "loss": 1.2659,
+ "step": 1904
+ },
+ {
+ "epoch": 1.9055359319808696,
+ "grad_norm": 0.6730911731719971,
+ "learning_rate": 5.866664743742162e-05,
+ "loss": 1.4772,
+ "step": 1905
+ },
+ {
+ "epoch": 1.9065362133099935,
+ "grad_norm": 0.6866170763969421,
+ "learning_rate": 5.857121902913008e-05,
+ "loss": 1.503,
+ "step": 1906
+ },
+ {
+ "epoch": 1.9075364946391171,
+ "grad_norm": 0.6140307784080505,
+ "learning_rate": 5.8475836143576433e-05,
+ "loss": 1.3118,
+ "step": 1907
+ },
+ {
+ "epoch": 1.908536775968241,
+ "grad_norm": 0.6074461340904236,
+ "learning_rate": 5.838049888556925e-05,
+ "loss": 1.2815,
+ "step": 1908
+ },
+ {
+ "epoch": 1.909537057297365,
+ "grad_norm": 0.6943862438201904,
+ "learning_rate": 5.8285207359866936e-05,
+ "loss": 1.2693,
+ "step": 1909
+ },
+ {
+ "epoch": 1.9105373386264888,
+ "grad_norm": 0.7455725073814392,
+ "learning_rate": 5.8189961671177574e-05,
+ "loss": 1.6509,
+ "step": 1910
+ },
+ {
+ "epoch": 1.9115376199556127,
+ "grad_norm": 0.645656943321228,
+ "learning_rate": 5.809476192415905e-05,
+ "loss": 1.3285,
+ "step": 1911
+ },
+ {
+ "epoch": 1.9125379012847363,
+ "grad_norm": 0.6280725002288818,
+ "learning_rate": 5.7999608223418534e-05,
+ "loss": 1.1409,
+ "step": 1912
+ },
+ {
+ "epoch": 1.91353818261386,
+ "grad_norm": 0.6084076762199402,
+ "learning_rate": 5.790450067351291e-05,
+ "loss": 1.4,
+ "step": 1913
+ },
+ {
+ "epoch": 1.9145384639429839,
+ "grad_norm": 0.5778687596321106,
+ "learning_rate": 5.780943937894805e-05,
+ "loss": 1.4359,
+ "step": 1914
+ },
+ {
+ "epoch": 1.9155387452721078,
+ "grad_norm": 0.5729363560676575,
+ "learning_rate": 5.771442444417918e-05,
+ "loss": 1.1936,
+ "step": 1915
+ },
+ {
+ "epoch": 1.9165390266012317,
+ "grad_norm": 0.5981405973434448,
+ "learning_rate": 5.761945597361054e-05,
+ "loss": 1.4219,
+ "step": 1916
+ },
+ {
+ "epoch": 1.9175393079303555,
+ "grad_norm": 0.6797769069671631,
+ "learning_rate": 5.752453407159522e-05,
+ "loss": 1.2791,
+ "step": 1917
+ },
+ {
+ "epoch": 1.9185395892594792,
+ "grad_norm": 0.6143385171890259,
+ "learning_rate": 5.742965884243532e-05,
+ "loss": 1.1946,
+ "step": 1918
+ },
+ {
+ "epoch": 1.919539870588603,
+ "grad_norm": 0.7437219023704529,
+ "learning_rate": 5.733483039038149e-05,
+ "loss": 1.4242,
+ "step": 1919
+ },
+ {
+ "epoch": 1.9205401519177268,
+ "grad_norm": 0.6434136033058167,
+ "learning_rate": 5.724004881963311e-05,
+ "loss": 1.3105,
+ "step": 1920
+ },
+ {
+ "epoch": 1.9215404332468506,
+ "grad_norm": 0.6449226140975952,
+ "learning_rate": 5.714531423433791e-05,
+ "loss": 1.3863,
+ "step": 1921
+ },
+ {
+ "epoch": 1.9225407145759745,
+ "grad_norm": 0.7470558881759644,
+ "learning_rate": 5.705062673859216e-05,
+ "loss": 1.3502,
+ "step": 1922
+ },
+ {
+ "epoch": 1.9235409959050984,
+ "grad_norm": 0.6595833897590637,
+ "learning_rate": 5.69559864364402e-05,
+ "loss": 1.4515,
+ "step": 1923
+ },
+ {
+ "epoch": 1.924541277234222,
+ "grad_norm": 0.6996669769287109,
+ "learning_rate": 5.6861393431874675e-05,
+ "loss": 1.5347,
+ "step": 1924
+ },
+ {
+ "epoch": 1.925541558563346,
+ "grad_norm": 0.6640759706497192,
+ "learning_rate": 5.676684782883615e-05,
+ "loss": 1.322,
+ "step": 1925
+ },
+ {
+ "epoch": 1.9265418398924696,
+ "grad_norm": 0.6044187545776367,
+ "learning_rate": 5.667234973121317e-05,
+ "loss": 1.3457,
+ "step": 1926
+ },
+ {
+ "epoch": 1.9275421212215935,
+ "grad_norm": 0.6154062747955322,
+ "learning_rate": 5.6577899242842025e-05,
+ "loss": 1.4132,
+ "step": 1927
+ },
+ {
+ "epoch": 1.9285424025507174,
+ "grad_norm": 0.730377733707428,
+ "learning_rate": 5.648349646750673e-05,
+ "loss": 1.4286,
+ "step": 1928
+ },
+ {
+ "epoch": 1.9295426838798413,
+ "grad_norm": 0.6812162399291992,
+ "learning_rate": 5.6389141508938903e-05,
+ "loss": 1.4494,
+ "step": 1929
+ },
+ {
+ "epoch": 1.9305429652089652,
+ "grad_norm": 0.6427568197250366,
+ "learning_rate": 5.629483447081751e-05,
+ "loss": 1.4093,
+ "step": 1930
+ },
+ {
+ "epoch": 1.9315432465380888,
+ "grad_norm": 0.672756016254425,
+ "learning_rate": 5.620057545676901e-05,
+ "loss": 1.3817,
+ "step": 1931
+ },
+ {
+ "epoch": 1.9325435278672125,
+ "grad_norm": 0.6241796016693115,
+ "learning_rate": 5.610636457036693e-05,
+ "loss": 1.3074,
+ "step": 1932
+ },
+ {
+ "epoch": 1.9335438091963364,
+ "grad_norm": 0.5853481888771057,
+ "learning_rate": 5.601220191513208e-05,
+ "loss": 1.3297,
+ "step": 1933
+ },
+ {
+ "epoch": 1.9345440905254603,
+ "grad_norm": 0.5953206419944763,
+ "learning_rate": 5.591808759453214e-05,
+ "loss": 1.4803,
+ "step": 1934
+ },
+ {
+ "epoch": 1.9355443718545842,
+ "grad_norm": 0.6578570008277893,
+ "learning_rate": 5.5824021711981686e-05,
+ "loss": 1.1907,
+ "step": 1935
+ },
+ {
+ "epoch": 1.936544653183708,
+ "grad_norm": 0.7336297035217285,
+ "learning_rate": 5.573000437084221e-05,
+ "loss": 1.3186,
+ "step": 1936
+ },
+ {
+ "epoch": 1.9375449345128317,
+ "grad_norm": 0.594570517539978,
+ "learning_rate": 5.563603567442168e-05,
+ "loss": 1.499,
+ "step": 1937
+ },
+ {
+ "epoch": 1.9385452158419556,
+ "grad_norm": 0.6666537523269653,
+ "learning_rate": 5.554211572597477e-05,
+ "loss": 1.4292,
+ "step": 1938
+ },
+ {
+ "epoch": 1.9395454971710793,
+ "grad_norm": 0.6429474353790283,
+ "learning_rate": 5.544824462870244e-05,
+ "loss": 1.5317,
+ "step": 1939
+ },
+ {
+ "epoch": 1.9405457785002032,
+ "grad_norm": 0.6204141974449158,
+ "learning_rate": 5.5354422485752125e-05,
+ "loss": 1.3496,
+ "step": 1940
+ },
+ {
+ "epoch": 1.941546059829327,
+ "grad_norm": 0.6017738580703735,
+ "learning_rate": 5.5260649400217326e-05,
+ "loss": 1.2879,
+ "step": 1941
+ },
+ {
+ "epoch": 1.942546341158451,
+ "grad_norm": 0.7276145219802856,
+ "learning_rate": 5.5166925475137735e-05,
+ "loss": 1.4061,
+ "step": 1942
+ },
+ {
+ "epoch": 1.9435466224875746,
+ "grad_norm": 0.5995808243751526,
+ "learning_rate": 5.507325081349903e-05,
+ "loss": 1.3676,
+ "step": 1943
+ },
+ {
+ "epoch": 1.9445469038166985,
+ "grad_norm": 0.7158801555633545,
+ "learning_rate": 5.497962551823266e-05,
+ "loss": 1.4506,
+ "step": 1944
+ },
+ {
+ "epoch": 1.9455471851458221,
+ "grad_norm": 0.7192680239677429,
+ "learning_rate": 5.488604969221597e-05,
+ "loss": 1.276,
+ "step": 1945
+ },
+ {
+ "epoch": 1.946547466474946,
+ "grad_norm": 0.5900127291679382,
+ "learning_rate": 5.479252343827178e-05,
+ "loss": 1.2548,
+ "step": 1946
+ },
+ {
+ "epoch": 1.94754774780407,
+ "grad_norm": 0.8271359801292419,
+ "learning_rate": 5.469904685916861e-05,
+ "loss": 1.2133,
+ "step": 1947
+ },
+ {
+ "epoch": 1.9485480291331938,
+ "grad_norm": 0.6850492358207703,
+ "learning_rate": 5.460562005762024e-05,
+ "loss": 1.4255,
+ "step": 1948
+ },
+ {
+ "epoch": 1.9495483104623177,
+ "grad_norm": 0.6316859722137451,
+ "learning_rate": 5.4512243136285915e-05,
+ "loss": 1.2897,
+ "step": 1949
+ },
+ {
+ "epoch": 1.9505485917914414,
+ "grad_norm": 0.6135215759277344,
+ "learning_rate": 5.441891619776987e-05,
+ "loss": 1.3193,
+ "step": 1950
+ },
+ {
+ "epoch": 1.951548873120565,
+ "grad_norm": 0.7005714178085327,
+ "learning_rate": 5.432563934462166e-05,
+ "loss": 1.4412,
+ "step": 1951
+ },
+ {
+ "epoch": 1.952549154449689,
+ "grad_norm": 0.6692869067192078,
+ "learning_rate": 5.423241267933557e-05,
+ "loss": 1.2523,
+ "step": 1952
+ },
+ {
+ "epoch": 1.9535494357788128,
+ "grad_norm": 0.5568224191665649,
+ "learning_rate": 5.4139236304350935e-05,
+ "loss": 1.3503,
+ "step": 1953
+ },
+ {
+ "epoch": 1.9545497171079367,
+ "grad_norm": 0.6143470406532288,
+ "learning_rate": 5.404611032205169e-05,
+ "loss": 1.4381,
+ "step": 1954
+ },
+ {
+ "epoch": 1.9555499984370606,
+ "grad_norm": 0.6193466782569885,
+ "learning_rate": 5.3953034834766416e-05,
+ "loss": 1.4821,
+ "step": 1955
+ },
+ {
+ "epoch": 1.9565502797661842,
+ "grad_norm": 0.6553237438201904,
+ "learning_rate": 5.386000994476832e-05,
+ "loss": 1.3022,
+ "step": 1956
+ },
+ {
+ "epoch": 1.957550561095308,
+ "grad_norm": 0.7644321918487549,
+ "learning_rate": 5.376703575427481e-05,
+ "loss": 1.444,
+ "step": 1957
+ },
+ {
+ "epoch": 1.9585508424244318,
+ "grad_norm": 0.720217227935791,
+ "learning_rate": 5.367411236544786e-05,
+ "loss": 1.46,
+ "step": 1958
+ },
+ {
+ "epoch": 1.9595511237535557,
+ "grad_norm": 0.6305975317955017,
+ "learning_rate": 5.3581239880393375e-05,
+ "loss": 1.5285,
+ "step": 1959
+ },
+ {
+ "epoch": 1.9605514050826796,
+ "grad_norm": 0.5860128998756409,
+ "learning_rate": 5.3488418401161475e-05,
+ "loss": 1.254,
+ "step": 1960
+ },
+ {
+ "epoch": 1.9615516864118034,
+ "grad_norm": 0.6627411842346191,
+ "learning_rate": 5.339564802974615e-05,
+ "loss": 1.2639,
+ "step": 1961
+ },
+ {
+ "epoch": 1.962551967740927,
+ "grad_norm": 0.614637017250061,
+ "learning_rate": 5.33029288680852e-05,
+ "loss": 1.3714,
+ "step": 1962
+ },
+ {
+ "epoch": 1.963552249070051,
+ "grad_norm": 0.6468274593353271,
+ "learning_rate": 5.321026101806032e-05,
+ "loss": 1.3802,
+ "step": 1963
+ },
+ {
+ "epoch": 1.9645525303991747,
+ "grad_norm": 0.6303175091743469,
+ "learning_rate": 5.311764458149664e-05,
+ "loss": 1.3505,
+ "step": 1964
+ },
+ {
+ "epoch": 1.9655528117282985,
+ "grad_norm": 0.6070892214775085,
+ "learning_rate": 5.302507966016295e-05,
+ "loss": 1.4039,
+ "step": 1965
+ },
+ {
+ "epoch": 1.9665530930574224,
+ "grad_norm": 0.6614121198654175,
+ "learning_rate": 5.293256635577126e-05,
+ "loss": 1.324,
+ "step": 1966
+ },
+ {
+ "epoch": 1.9675533743865463,
+ "grad_norm": 0.6081134676933289,
+ "learning_rate": 5.284010476997705e-05,
+ "loss": 1.2839,
+ "step": 1967
+ },
+ {
+ "epoch": 1.96855365571567,
+ "grad_norm": 0.707693338394165,
+ "learning_rate": 5.274769500437882e-05,
+ "loss": 1.4179,
+ "step": 1968
+ },
+ {
+ "epoch": 1.9695539370447939,
+ "grad_norm": 0.7113189697265625,
+ "learning_rate": 5.265533716051825e-05,
+ "loss": 1.1911,
+ "step": 1969
+ },
+ {
+ "epoch": 1.9705542183739175,
+ "grad_norm": 0.7755022048950195,
+ "learning_rate": 5.256303133987982e-05,
+ "loss": 1.3266,
+ "step": 1970
+ },
+ {
+ "epoch": 1.9715544997030414,
+ "grad_norm": 0.6285157203674316,
+ "learning_rate": 5.247077764389099e-05,
+ "loss": 1.2468,
+ "step": 1971
+ },
+ {
+ "epoch": 1.9725547810321653,
+ "grad_norm": 0.6110594272613525,
+ "learning_rate": 5.2378576173921934e-05,
+ "loss": 1.2845,
+ "step": 1972
+ },
+ {
+ "epoch": 1.9735550623612892,
+ "grad_norm": 0.7382394671440125,
+ "learning_rate": 5.22864270312853e-05,
+ "loss": 1.4531,
+ "step": 1973
+ },
+ {
+ "epoch": 1.974555343690413,
+ "grad_norm": 3.4922046661376953,
+ "learning_rate": 5.219433031723641e-05,
+ "loss": 1.174,
+ "step": 1974
+ },
+ {
+ "epoch": 1.9755556250195367,
+ "grad_norm": 0.5983368158340454,
+ "learning_rate": 5.210228613297281e-05,
+ "loss": 1.2157,
+ "step": 1975
+ },
+ {
+ "epoch": 1.9765559063486604,
+ "grad_norm": 0.6913344264030457,
+ "learning_rate": 5.201029457963451e-05,
+ "loss": 1.3994,
+ "step": 1976
+ },
+ {
+ "epoch": 1.9775561876777843,
+ "grad_norm": 0.5928197503089905,
+ "learning_rate": 5.191835575830352e-05,
+ "loss": 1.5831,
+ "step": 1977
+ },
+ {
+ "epoch": 1.9785564690069082,
+ "grad_norm": 0.6904213428497314,
+ "learning_rate": 5.1826469770004026e-05,
+ "loss": 1.3408,
+ "step": 1978
+ },
+ {
+ "epoch": 1.979556750336032,
+ "grad_norm": 0.6584596633911133,
+ "learning_rate": 5.1734636715702043e-05,
+ "loss": 1.3828,
+ "step": 1979
+ },
+ {
+ "epoch": 1.980557031665156,
+ "grad_norm": 0.7366130352020264,
+ "learning_rate": 5.1642856696305575e-05,
+ "loss": 1.5174,
+ "step": 1980
+ },
+ {
+ "epoch": 1.9815573129942796,
+ "grad_norm": 0.6994180679321289,
+ "learning_rate": 5.155112981266422e-05,
+ "loss": 1.6449,
+ "step": 1981
+ },
+ {
+ "epoch": 1.9825575943234035,
+ "grad_norm": 0.685383677482605,
+ "learning_rate": 5.145945616556921e-05,
+ "loss": 1.2966,
+ "step": 1982
+ },
+ {
+ "epoch": 1.9835578756525272,
+ "grad_norm": 0.6237232685089111,
+ "learning_rate": 5.136783585575336e-05,
+ "loss": 1.3721,
+ "step": 1983
+ },
+ {
+ "epoch": 1.984558156981651,
+ "grad_norm": 0.6665890216827393,
+ "learning_rate": 5.127626898389075e-05,
+ "loss": 1.3245,
+ "step": 1984
+ },
+ {
+ "epoch": 1.985558438310775,
+ "grad_norm": 0.607271134853363,
+ "learning_rate": 5.118475565059691e-05,
+ "loss": 1.1436,
+ "step": 1985
+ },
+ {
+ "epoch": 1.9865587196398988,
+ "grad_norm": 0.6433306336402893,
+ "learning_rate": 5.109329595642829e-05,
+ "loss": 1.521,
+ "step": 1986
+ },
+ {
+ "epoch": 1.9875590009690225,
+ "grad_norm": 0.6605822443962097,
+ "learning_rate": 5.1001890001882734e-05,
+ "loss": 1.3559,
+ "step": 1987
+ },
+ {
+ "epoch": 1.9885592822981464,
+ "grad_norm": 0.6377484202384949,
+ "learning_rate": 5.091053788739878e-05,
+ "loss": 1.3607,
+ "step": 1988
+ },
+ {
+ "epoch": 1.98955956362727,
+ "grad_norm": 0.6457136273384094,
+ "learning_rate": 5.081923971335582e-05,
+ "loss": 1.2594,
+ "step": 1989
+ },
+ {
+ "epoch": 1.990559844956394,
+ "grad_norm": 0.6249425411224365,
+ "learning_rate": 5.072799558007415e-05,
+ "loss": 1.4896,
+ "step": 1990
+ },
+ {
+ "epoch": 1.9915601262855178,
+ "grad_norm": 0.6000842452049255,
+ "learning_rate": 5.063680558781445e-05,
+ "loss": 1.1787,
+ "step": 1991
+ },
+ {
+ "epoch": 1.9925604076146417,
+ "grad_norm": 0.5997797250747681,
+ "learning_rate": 5.0545669836778144e-05,
+ "loss": 1.4464,
+ "step": 1992
+ },
+ {
+ "epoch": 1.9935606889437656,
+ "grad_norm": 0.665529191493988,
+ "learning_rate": 5.045458842710684e-05,
+ "loss": 1.5382,
+ "step": 1993
+ },
+ {
+ "epoch": 1.9945609702728893,
+ "grad_norm": 0.6204399466514587,
+ "learning_rate": 5.036356145888263e-05,
+ "loss": 1.218,
+ "step": 1994
+ },
+ {
+ "epoch": 1.995561251602013,
+ "grad_norm": 0.6627510786056519,
+ "learning_rate": 5.0272589032127594e-05,
+ "loss": 1.5239,
+ "step": 1995
+ },
+ {
+ "epoch": 1.9965615329311368,
+ "grad_norm": 0.708292543888092,
+ "learning_rate": 5.0181671246804064e-05,
+ "loss": 1.4107,
+ "step": 1996
+ },
+ {
+ "epoch": 1.9975618142602607,
+ "grad_norm": 0.5863770842552185,
+ "learning_rate": 5.009080820281415e-05,
+ "loss": 1.1877,
+ "step": 1997
+ },
+ {
+ "epoch": 1.9985620955893846,
+ "grad_norm": 0.5950385928153992,
+ "learning_rate": 5.000000000000002e-05,
+ "loss": 1.2165,
+ "step": 1998
+ },
+ {
+ "epoch": 1.9995623769185085,
+ "grad_norm": 0.6033083200454712,
+ "learning_rate": 4.990924673814336e-05,
+ "loss": 1.3102,
+ "step": 1999
+ },
+ {
+ "epoch": 2.0005626582476324,
+ "grad_norm": 0.5440614223480225,
+ "learning_rate": 4.981854851696568e-05,
+ "loss": 1.0143,
+ "step": 2000
+ },
+ {
+ "epoch": 2.001562939576756,
+ "grad_norm": 0.6712716221809387,
+ "learning_rate": 4.972790543612783e-05,
+ "loss": 1.5009,
+ "step": 2001
+ },
+ {
+ "epoch": 2.0025632209058797,
+ "grad_norm": 0.6744182109832764,
+ "learning_rate": 4.963731759523022e-05,
+ "loss": 1.388,
+ "step": 2002
+ },
+ {
+ "epoch": 2.0035635022350036,
+ "grad_norm": 0.6512018442153931,
+ "learning_rate": 4.954678509381253e-05,
+ "loss": 1.1774,
+ "step": 2003
+ },
+ {
+ "epoch": 2.0045637835641275,
+ "grad_norm": 0.6344821453094482,
+ "learning_rate": 4.945630803135354e-05,
+ "loss": 1.2775,
+ "step": 2004
+ },
+ {
+ "epoch": 2.0055640648932513,
+ "grad_norm": 0.6289503574371338,
+ "learning_rate": 4.9365886507271243e-05,
+ "loss": 1.0352,
+ "step": 2005
+ },
+ {
+ "epoch": 2.0065643462223752,
+ "grad_norm": 0.6312137842178345,
+ "learning_rate": 4.9275520620922477e-05,
+ "loss": 1.4537,
+ "step": 2006
+ },
+ {
+ "epoch": 2.0075646275514987,
+ "grad_norm": 0.6128604412078857,
+ "learning_rate": 4.918521047160308e-05,
+ "loss": 1.1413,
+ "step": 2007
+ },
+ {
+ "epoch": 2.0085649088806226,
+ "grad_norm": 0.651336669921875,
+ "learning_rate": 4.9094956158547535e-05,
+ "loss": 1.1284,
+ "step": 2008
+ },
+ {
+ "epoch": 2.0095651902097464,
+ "grad_norm": 0.6941661834716797,
+ "learning_rate": 4.900475778092897e-05,
+ "loss": 1.3201,
+ "step": 2009
+ },
+ {
+ "epoch": 2.0105654715388703,
+ "grad_norm": 0.7407479882240295,
+ "learning_rate": 4.891461543785917e-05,
+ "loss": 1.3644,
+ "step": 2010
+ },
+ {
+ "epoch": 2.011565752867994,
+ "grad_norm": 0.673768162727356,
+ "learning_rate": 4.882452922838818e-05,
+ "loss": 1.3236,
+ "step": 2011
+ },
+ {
+ "epoch": 2.012566034197118,
+ "grad_norm": 0.6047630310058594,
+ "learning_rate": 4.873449925150455e-05,
+ "loss": 1.1617,
+ "step": 2012
+ },
+ {
+ "epoch": 2.0135663155262415,
+ "grad_norm": 0.5809116363525391,
+ "learning_rate": 4.864452560613485e-05,
+ "loss": 1.1697,
+ "step": 2013
+ },
+ {
+ "epoch": 2.0145665968553654,
+ "grad_norm": 0.7134872078895569,
+ "learning_rate": 4.855460839114392e-05,
+ "loss": 1.1404,
+ "step": 2014
+ },
+ {
+ "epoch": 2.0155668781844893,
+ "grad_norm": 0.6681728959083557,
+ "learning_rate": 4.846474770533446e-05,
+ "loss": 1.1199,
+ "step": 2015
+ },
+ {
+ "epoch": 2.016567159513613,
+ "grad_norm": 0.7536188364028931,
+ "learning_rate": 4.837494364744711e-05,
+ "loss": 1.2809,
+ "step": 2016
+ },
+ {
+ "epoch": 2.017567440842737,
+ "grad_norm": 0.748410701751709,
+ "learning_rate": 4.828519631616037e-05,
+ "loss": 1.1881,
+ "step": 2017
+ },
+ {
+ "epoch": 2.018567722171861,
+ "grad_norm": 0.6426956653594971,
+ "learning_rate": 4.8195505810090246e-05,
+ "loss": 1.1731,
+ "step": 2018
+ },
+ {
+ "epoch": 2.019568003500985,
+ "grad_norm": 0.6496251225471497,
+ "learning_rate": 4.810587222779043e-05,
+ "loss": 0.8958,
+ "step": 2019
+ },
+ {
+ "epoch": 2.0205682848301083,
+ "grad_norm": 0.5894680619239807,
+ "learning_rate": 4.801629566775196e-05,
+ "loss": 1.3354,
+ "step": 2020
+ },
+ {
+ "epoch": 2.021568566159232,
+ "grad_norm": 0.6889281868934631,
+ "learning_rate": 4.792677622840336e-05,
+ "loss": 1.3185,
+ "step": 2021
+ },
+ {
+ "epoch": 2.022568847488356,
+ "grad_norm": 0.7822543978691101,
+ "learning_rate": 4.783731400811022e-05,
+ "loss": 1.281,
+ "step": 2022
+ },
+ {
+ "epoch": 2.02356912881748,
+ "grad_norm": 0.5691697001457214,
+ "learning_rate": 4.774790910517541e-05,
+ "loss": 1.0311,
+ "step": 2023
+ },
+ {
+ "epoch": 2.024569410146604,
+ "grad_norm": 0.5973305702209473,
+ "learning_rate": 4.7658561617838684e-05,
+ "loss": 1.1829,
+ "step": 2024
+ },
+ {
+ "epoch": 2.0255696914757277,
+ "grad_norm": 0.6182019114494324,
+ "learning_rate": 4.756927164427685e-05,
+ "loss": 1.2656,
+ "step": 2025
+ },
+ {
+ "epoch": 2.026569972804851,
+ "grad_norm": 0.5737195611000061,
+ "learning_rate": 4.748003928260335e-05,
+ "loss": 1.2637,
+ "step": 2026
+ },
+ {
+ "epoch": 2.027570254133975,
+ "grad_norm": 0.6673151254653931,
+ "learning_rate": 4.73908646308685e-05,
+ "loss": 1.2574,
+ "step": 2027
+ },
+ {
+ "epoch": 2.028570535463099,
+ "grad_norm": 0.5862526893615723,
+ "learning_rate": 4.730174778705908e-05,
+ "loss": 1.2799,
+ "step": 2028
+ },
+ {
+ "epoch": 2.029570816792223,
+ "grad_norm": 0.5774174928665161,
+ "learning_rate": 4.721268884909833e-05,
+ "loss": 1.2472,
+ "step": 2029
+ },
+ {
+ "epoch": 2.0305710981213467,
+ "grad_norm": 0.6707743406295776,
+ "learning_rate": 4.712368791484597e-05,
+ "loss": 1.101,
+ "step": 2030
+ },
+ {
+ "epoch": 2.0315713794504706,
+ "grad_norm": 0.735133707523346,
+ "learning_rate": 4.703474508209793e-05,
+ "loss": 1.2771,
+ "step": 2031
+ },
+ {
+ "epoch": 2.032571660779594,
+ "grad_norm": 0.8130558729171753,
+ "learning_rate": 4.694586044858633e-05,
+ "loss": 1.1673,
+ "step": 2032
+ },
+ {
+ "epoch": 2.033571942108718,
+ "grad_norm": 0.6132652163505554,
+ "learning_rate": 4.6857034111979235e-05,
+ "loss": 1.1806,
+ "step": 2033
+ },
+ {
+ "epoch": 2.034572223437842,
+ "grad_norm": 0.6277550458908081,
+ "learning_rate": 4.6768266169880804e-05,
+ "loss": 1.2887,
+ "step": 2034
+ },
+ {
+ "epoch": 2.0355725047669657,
+ "grad_norm": 0.744972288608551,
+ "learning_rate": 4.66795567198309e-05,
+ "loss": 1.2582,
+ "step": 2035
+ },
+ {
+ "epoch": 2.0365727860960896,
+ "grad_norm": 1.3557803630828857,
+ "learning_rate": 4.6590905859305135e-05,
+ "loss": 1.1926,
+ "step": 2036
+ },
+ {
+ "epoch": 2.0375730674252135,
+ "grad_norm": 0.6581830978393555,
+ "learning_rate": 4.650231368571486e-05,
+ "loss": 1.1849,
+ "step": 2037
+ },
+ {
+ "epoch": 2.038573348754337,
+ "grad_norm": 0.8084182739257812,
+ "learning_rate": 4.6413780296406764e-05,
+ "loss": 1.3104,
+ "step": 2038
+ },
+ {
+ "epoch": 2.039573630083461,
+ "grad_norm": 0.7013028860092163,
+ "learning_rate": 4.6325305788663096e-05,
+ "loss": 1.4023,
+ "step": 2039
+ },
+ {
+ "epoch": 2.0405739114125847,
+ "grad_norm": 0.5487765073776245,
+ "learning_rate": 4.623689025970128e-05,
+ "loss": 1.1617,
+ "step": 2040
+ },
+ {
+ "epoch": 2.0415741927417086,
+ "grad_norm": 0.6320502758026123,
+ "learning_rate": 4.6148533806674074e-05,
+ "loss": 1.1965,
+ "step": 2041
+ },
+ {
+ "epoch": 2.0425744740708325,
+ "grad_norm": 0.6310489177703857,
+ "learning_rate": 4.606023652666915e-05,
+ "loss": 1.3635,
+ "step": 2042
+ },
+ {
+ "epoch": 2.0435747553999564,
+ "grad_norm": 0.6365699768066406,
+ "learning_rate": 4.597199851670932e-05,
+ "loss": 1.4671,
+ "step": 2043
+ },
+ {
+ "epoch": 2.0445750367290803,
+ "grad_norm": 0.6468609571456909,
+ "learning_rate": 4.5883819873752156e-05,
+ "loss": 1.3253,
+ "step": 2044
+ },
+ {
+ "epoch": 2.0455753180582037,
+ "grad_norm": 0.6045584678649902,
+ "learning_rate": 4.5795700694690046e-05,
+ "loss": 1.4587,
+ "step": 2045
+ },
+ {
+ "epoch": 2.0465755993873276,
+ "grad_norm": 0.6662184596061707,
+ "learning_rate": 4.5707641076350074e-05,
+ "loss": 1.2276,
+ "step": 2046
+ },
+ {
+ "epoch": 2.0475758807164515,
+ "grad_norm": 0.7220503687858582,
+ "learning_rate": 4.5619641115493774e-05,
+ "loss": 1.259,
+ "step": 2047
+ },
+ {
+ "epoch": 2.0485761620455754,
+ "grad_norm": 0.6610327959060669,
+ "learning_rate": 4.553170090881724e-05,
+ "loss": 1.4487,
+ "step": 2048
+ },
+ {
+ "epoch": 2.0495764433746992,
+ "grad_norm": 0.7678585052490234,
+ "learning_rate": 4.54438205529508e-05,
+ "loss": 1.205,
+ "step": 2049
+ },
+ {
+ "epoch": 2.050576724703823,
+ "grad_norm": 1.5810719728469849,
+ "learning_rate": 4.535600014445914e-05,
+ "loss": 1.392,
+ "step": 2050
+ },
+ {
+ "epoch": 2.0515770060329466,
+ "grad_norm": 0.5487958192825317,
+ "learning_rate": 4.5268239779840935e-05,
+ "loss": 1.2389,
+ "step": 2051
+ },
+ {
+ "epoch": 2.0525772873620705,
+ "grad_norm": 0.6395775675773621,
+ "learning_rate": 4.518053955552903e-05,
+ "loss": 1.3683,
+ "step": 2052
+ },
+ {
+ "epoch": 2.0535775686911943,
+ "grad_norm": 0.5679852366447449,
+ "learning_rate": 4.5092899567890035e-05,
+ "loss": 1.1907,
+ "step": 2053
+ },
+ {
+ "epoch": 2.0545778500203182,
+ "grad_norm": 0.6236241459846497,
+ "learning_rate": 4.5005319913224506e-05,
+ "loss": 1.1334,
+ "step": 2054
+ },
+ {
+ "epoch": 2.055578131349442,
+ "grad_norm": 0.7079814672470093,
+ "learning_rate": 4.491780068776663e-05,
+ "loss": 1.2638,
+ "step": 2055
+ },
+ {
+ "epoch": 2.056578412678566,
+ "grad_norm": 0.6677514910697937,
+ "learning_rate": 4.4830341987684166e-05,
+ "loss": 1.11,
+ "step": 2056
+ },
+ {
+ "epoch": 2.0575786940076894,
+ "grad_norm": 0.7721594572067261,
+ "learning_rate": 4.474294390907847e-05,
+ "loss": 1.2271,
+ "step": 2057
+ },
+ {
+ "epoch": 2.0585789753368133,
+ "grad_norm": 1.6144977807998657,
+ "learning_rate": 4.465560654798417e-05,
+ "loss": 1.2506,
+ "step": 2058
+ },
+ {
+ "epoch": 2.059579256665937,
+ "grad_norm": 0.7923681735992432,
+ "learning_rate": 4.4568330000369286e-05,
+ "loss": 1.1595,
+ "step": 2059
+ },
+ {
+ "epoch": 2.060579537995061,
+ "grad_norm": 0.6133561730384827,
+ "learning_rate": 4.448111436213486e-05,
+ "loss": 1.2743,
+ "step": 2060
+ },
+ {
+ "epoch": 2.061579819324185,
+ "grad_norm": 0.6275729537010193,
+ "learning_rate": 4.4393959729115244e-05,
+ "loss": 1.2725,
+ "step": 2061
+ },
+ {
+ "epoch": 2.062580100653309,
+ "grad_norm": 0.6153738498687744,
+ "learning_rate": 4.4306866197077544e-05,
+ "loss": 1.1801,
+ "step": 2062
+ },
+ {
+ "epoch": 2.0635803819824328,
+ "grad_norm": 0.638670027256012,
+ "learning_rate": 4.421983386172178e-05,
+ "loss": 1.2986,
+ "step": 2063
+ },
+ {
+ "epoch": 2.064580663311556,
+ "grad_norm": 0.6230247616767883,
+ "learning_rate": 4.413286281868081e-05,
+ "loss": 1.2258,
+ "step": 2064
+ },
+ {
+ "epoch": 2.06558094464068,
+ "grad_norm": 0.6063693761825562,
+ "learning_rate": 4.404595316352002e-05,
+ "loss": 1.3707,
+ "step": 2065
+ },
+ {
+ "epoch": 2.066581225969804,
+ "grad_norm": 0.5972939729690552,
+ "learning_rate": 4.3959104991737455e-05,
+ "loss": 1.256,
+ "step": 2066
+ },
+ {
+ "epoch": 2.067581507298928,
+ "grad_norm": 0.679032564163208,
+ "learning_rate": 4.387231839876349e-05,
+ "loss": 1.3107,
+ "step": 2067
+ },
+ {
+ "epoch": 2.0685817886280518,
+ "grad_norm": 0.6892005801200867,
+ "learning_rate": 4.3785593479960964e-05,
+ "loss": 1.393,
+ "step": 2068
+ },
+ {
+ "epoch": 2.0695820699571756,
+ "grad_norm": 0.5849416851997375,
+ "learning_rate": 4.369893033062481e-05,
+ "loss": 1.1333,
+ "step": 2069
+ },
+ {
+ "epoch": 2.070582351286299,
+ "grad_norm": 0.6160491108894348,
+ "learning_rate": 4.3612329045982236e-05,
+ "loss": 1.3765,
+ "step": 2070
+ },
+ {
+ "epoch": 2.071582632615423,
+ "grad_norm": 0.6175869703292847,
+ "learning_rate": 4.35257897211923e-05,
+ "loss": 1.383,
+ "step": 2071
+ },
+ {
+ "epoch": 2.072582913944547,
+ "grad_norm": 0.6659654378890991,
+ "learning_rate": 4.343931245134616e-05,
+ "loss": 1.3973,
+ "step": 2072
+ },
+ {
+ "epoch": 2.0735831952736707,
+ "grad_norm": 0.6890555620193481,
+ "learning_rate": 4.335289733146665e-05,
+ "loss": 1.2443,
+ "step": 2073
+ },
+ {
+ "epoch": 2.0745834766027946,
+ "grad_norm": 0.5023195743560791,
+ "learning_rate": 4.326654445650833e-05,
+ "loss": 1.2592,
+ "step": 2074
+ },
+ {
+ "epoch": 2.0755837579319185,
+ "grad_norm": 0.6107515096664429,
+ "learning_rate": 4.3180253921357414e-05,
+ "loss": 1.2949,
+ "step": 2075
+ },
+ {
+ "epoch": 2.076584039261042,
+ "grad_norm": 0.6276577711105347,
+ "learning_rate": 4.309402582083161e-05,
+ "loss": 1.2398,
+ "step": 2076
+ },
+ {
+ "epoch": 2.077584320590166,
+ "grad_norm": 0.6834983825683594,
+ "learning_rate": 4.300786024968003e-05,
+ "loss": 1.213,
+ "step": 2077
+ },
+ {
+ "epoch": 2.0785846019192897,
+ "grad_norm": 0.6413993239402771,
+ "learning_rate": 4.2921757302583e-05,
+ "loss": 1.1989,
+ "step": 2078
+ },
+ {
+ "epoch": 2.0795848832484136,
+ "grad_norm": 0.7132616639137268,
+ "learning_rate": 4.283571707415214e-05,
+ "loss": 1.5454,
+ "step": 2079
+ },
+ {
+ "epoch": 2.0805851645775375,
+ "grad_norm": 0.590647280216217,
+ "learning_rate": 4.274973965893003e-05,
+ "loss": 1.0762,
+ "step": 2080
+ },
+ {
+ "epoch": 2.0815854459066614,
+ "grad_norm": 0.6809400916099548,
+ "learning_rate": 4.266382515139039e-05,
+ "loss": 1.205,
+ "step": 2081
+ },
+ {
+ "epoch": 2.0825857272357853,
+ "grad_norm": 0.6614822745323181,
+ "learning_rate": 4.2577973645937674e-05,
+ "loss": 1.3823,
+ "step": 2082
+ },
+ {
+ "epoch": 2.0835860085649087,
+ "grad_norm": 0.6928898096084595,
+ "learning_rate": 4.2492185236907125e-05,
+ "loss": 1.3693,
+ "step": 2083
+ },
+ {
+ "epoch": 2.0845862898940326,
+ "grad_norm": 0.6004021167755127,
+ "learning_rate": 4.2406460018564765e-05,
+ "loss": 1.3204,
+ "step": 2084
+ },
+ {
+ "epoch": 2.0855865712231565,
+ "grad_norm": 0.5938641428947449,
+ "learning_rate": 4.2320798085107036e-05,
+ "loss": 1.1583,
+ "step": 2085
+ },
+ {
+ "epoch": 2.0865868525522804,
+ "grad_norm": 0.6949739456176758,
+ "learning_rate": 4.223519953066099e-05,
+ "loss": 1.2057,
+ "step": 2086
+ },
+ {
+ "epoch": 2.0875871338814043,
+ "grad_norm": 0.6152746081352234,
+ "learning_rate": 4.214966444928387e-05,
+ "loss": 1.3205,
+ "step": 2087
+ },
+ {
+ "epoch": 2.088587415210528,
+ "grad_norm": 0.6244688630104065,
+ "learning_rate": 4.206419293496333e-05,
+ "loss": 1.2569,
+ "step": 2088
+ },
+ {
+ "epoch": 2.0895876965396516,
+ "grad_norm": 0.6392861008644104,
+ "learning_rate": 4.1978785081617057e-05,
+ "loss": 1.4032,
+ "step": 2089
+ },
+ {
+ "epoch": 2.0905879778687755,
+ "grad_norm": 0.6255760192871094,
+ "learning_rate": 4.1893440983092856e-05,
+ "loss": 1.3172,
+ "step": 2090
+ },
+ {
+ "epoch": 2.0915882591978994,
+ "grad_norm": 0.5918805599212646,
+ "learning_rate": 4.18081607331685e-05,
+ "loss": 1.1431,
+ "step": 2091
+ },
+ {
+ "epoch": 2.0925885405270233,
+ "grad_norm": 0.7690648436546326,
+ "learning_rate": 4.172294442555148e-05,
+ "loss": 1.4107,
+ "step": 2092
+ },
+ {
+ "epoch": 2.093588821856147,
+ "grad_norm": 0.6123422384262085,
+ "learning_rate": 4.1637792153879196e-05,
+ "loss": 1.1535,
+ "step": 2093
+ },
+ {
+ "epoch": 2.094589103185271,
+ "grad_norm": 0.6413111090660095,
+ "learning_rate": 4.15527040117185e-05,
+ "loss": 1.1825,
+ "step": 2094
+ },
+ {
+ "epoch": 2.0955893845143945,
+ "grad_norm": 0.7032740712165833,
+ "learning_rate": 4.146768009256595e-05,
+ "loss": 1.3696,
+ "step": 2095
+ },
+ {
+ "epoch": 2.0965896658435184,
+ "grad_norm": 0.6340607404708862,
+ "learning_rate": 4.13827204898474e-05,
+ "loss": 1.2627,
+ "step": 2096
+ },
+ {
+ "epoch": 2.0975899471726422,
+ "grad_norm": 0.5844863057136536,
+ "learning_rate": 4.129782529691815e-05,
+ "loss": 1.228,
+ "step": 2097
+ },
+ {
+ "epoch": 2.098590228501766,
+ "grad_norm": 0.5953666567802429,
+ "learning_rate": 4.1212994607062594e-05,
+ "loss": 1.3636,
+ "step": 2098
+ },
+ {
+ "epoch": 2.09959050983089,
+ "grad_norm": 0.6116781234741211,
+ "learning_rate": 4.1128228513494385e-05,
+ "loss": 1.3428,
+ "step": 2099
+ },
+ {
+ "epoch": 2.100590791160014,
+ "grad_norm": 0.5654290914535522,
+ "learning_rate": 4.1043527109356095e-05,
+ "loss": 1.3565,
+ "step": 2100
+ },
+ {
+ "epoch": 2.1015910724891373,
+ "grad_norm": 0.6171858906745911,
+ "learning_rate": 4.095889048771922e-05,
+ "loss": 1.4972,
+ "step": 2101
+ },
+ {
+ "epoch": 2.1025913538182612,
+ "grad_norm": 0.6021595001220703,
+ "learning_rate": 4.087431874158416e-05,
+ "loss": 1.2339,
+ "step": 2102
+ },
+ {
+ "epoch": 2.103591635147385,
+ "grad_norm": 0.6666091084480286,
+ "learning_rate": 4.0789811963879906e-05,
+ "loss": 1.2795,
+ "step": 2103
+ },
+ {
+ "epoch": 2.104591916476509,
+ "grad_norm": 0.708927571773529,
+ "learning_rate": 4.070537024746416e-05,
+ "loss": 1.2198,
+ "step": 2104
+ },
+ {
+ "epoch": 2.105592197805633,
+ "grad_norm": 0.6773338913917542,
+ "learning_rate": 4.06209936851231e-05,
+ "loss": 1.1575,
+ "step": 2105
+ },
+ {
+ "epoch": 2.106592479134757,
+ "grad_norm": 0.5854949951171875,
+ "learning_rate": 4.053668236957134e-05,
+ "loss": 1.1772,
+ "step": 2106
+ },
+ {
+ "epoch": 2.1075927604638807,
+ "grad_norm": 0.6273494362831116,
+ "learning_rate": 4.0452436393451735e-05,
+ "loss": 1.0734,
+ "step": 2107
+ },
+ {
+ "epoch": 2.108593041793004,
+ "grad_norm": 0.5946929454803467,
+ "learning_rate": 4.036825584933533e-05,
+ "loss": 1.3508,
+ "step": 2108
+ },
+ {
+ "epoch": 2.109593323122128,
+ "grad_norm": 0.696772575378418,
+ "learning_rate": 4.028414082972141e-05,
+ "loss": 1.4982,
+ "step": 2109
+ },
+ {
+ "epoch": 2.110593604451252,
+ "grad_norm": 0.5967583656311035,
+ "learning_rate": 4.020009142703708e-05,
+ "loss": 1.2575,
+ "step": 2110
+ },
+ {
+ "epoch": 2.1115938857803758,
+ "grad_norm": 0.6711663603782654,
+ "learning_rate": 4.011610773363751e-05,
+ "loss": 1.4659,
+ "step": 2111
+ },
+ {
+ "epoch": 2.1125941671094997,
+ "grad_norm": 0.7978873252868652,
+ "learning_rate": 4.003218984180552e-05,
+ "loss": 1.3647,
+ "step": 2112
+ },
+ {
+ "epoch": 2.1135944484386235,
+ "grad_norm": 0.5708624720573425,
+ "learning_rate": 3.994833784375177e-05,
+ "loss": 1.2855,
+ "step": 2113
+ },
+ {
+ "epoch": 2.114594729767747,
+ "grad_norm": 0.6096190810203552,
+ "learning_rate": 3.986455183161437e-05,
+ "loss": 1.2204,
+ "step": 2114
+ },
+ {
+ "epoch": 2.115595011096871,
+ "grad_norm": 0.5597400665283203,
+ "learning_rate": 3.978083189745907e-05,
+ "loss": 1.1564,
+ "step": 2115
+ },
+ {
+ "epoch": 2.1165952924259948,
+ "grad_norm": 0.6057879328727722,
+ "learning_rate": 3.9697178133278855e-05,
+ "loss": 1.4124,
+ "step": 2116
+ },
+ {
+ "epoch": 2.1175955737551186,
+ "grad_norm": 0.6057745814323425,
+ "learning_rate": 3.961359063099416e-05,
+ "loss": 1.2136,
+ "step": 2117
+ },
+ {
+ "epoch": 2.1185958550842425,
+ "grad_norm": 0.589461088180542,
+ "learning_rate": 3.953006948245247e-05,
+ "loss": 1.0236,
+ "step": 2118
+ },
+ {
+ "epoch": 2.1195961364133664,
+ "grad_norm": 0.6324148774147034,
+ "learning_rate": 3.944661477942844e-05,
+ "loss": 1.1293,
+ "step": 2119
+ },
+ {
+ "epoch": 2.1205964177424903,
+ "grad_norm": 0.6559157967567444,
+ "learning_rate": 3.9363226613623736e-05,
+ "loss": 1.1493,
+ "step": 2120
+ },
+ {
+ "epoch": 2.1215966990716137,
+ "grad_norm": 0.616329550743103,
+ "learning_rate": 3.9279905076666826e-05,
+ "loss": 1.2079,
+ "step": 2121
+ },
+ {
+ "epoch": 2.1225969804007376,
+ "grad_norm": 0.9661369919776917,
+ "learning_rate": 3.9196650260113044e-05,
+ "loss": 1.307,
+ "step": 2122
+ },
+ {
+ "epoch": 2.1235972617298615,
+ "grad_norm": 0.7038710713386536,
+ "learning_rate": 3.9113462255444334e-05,
+ "loss": 1.2016,
+ "step": 2123
+ },
+ {
+ "epoch": 2.1245975430589854,
+ "grad_norm": 0.7221149802207947,
+ "learning_rate": 3.903034115406931e-05,
+ "loss": 1.1263,
+ "step": 2124
+ },
+ {
+ "epoch": 2.1255978243881093,
+ "grad_norm": 0.5545951128005981,
+ "learning_rate": 3.8947287047323e-05,
+ "loss": 1.014,
+ "step": 2125
+ },
+ {
+ "epoch": 2.126598105717233,
+ "grad_norm": 0.7009350657463074,
+ "learning_rate": 3.886430002646688e-05,
+ "loss": 1.3541,
+ "step": 2126
+ },
+ {
+ "epoch": 2.1275983870463566,
+ "grad_norm": 0.7145485281944275,
+ "learning_rate": 3.878138018268866e-05,
+ "loss": 1.2772,
+ "step": 2127
+ },
+ {
+ "epoch": 2.1285986683754805,
+ "grad_norm": 0.5328482985496521,
+ "learning_rate": 3.869852760710222e-05,
+ "loss": 1.536,
+ "step": 2128
+ },
+ {
+ "epoch": 2.1295989497046044,
+ "grad_norm": 0.6539137363433838,
+ "learning_rate": 3.861574239074762e-05,
+ "loss": 1.1556,
+ "step": 2129
+ },
+ {
+ "epoch": 2.1305992310337283,
+ "grad_norm": 0.7259630560874939,
+ "learning_rate": 3.8533024624590776e-05,
+ "loss": 1.4835,
+ "step": 2130
+ },
+ {
+ "epoch": 2.131599512362852,
+ "grad_norm": 0.6348355412483215,
+ "learning_rate": 3.845037439952362e-05,
+ "loss": 1.2484,
+ "step": 2131
+ },
+ {
+ "epoch": 2.132599793691976,
+ "grad_norm": 0.5768122673034668,
+ "learning_rate": 3.836779180636373e-05,
+ "loss": 1.1329,
+ "step": 2132
+ },
+ {
+ "epoch": 2.1336000750210995,
+ "grad_norm": 0.6438059210777283,
+ "learning_rate": 3.828527693585451e-05,
+ "loss": 1.2199,
+ "step": 2133
+ },
+ {
+ "epoch": 2.1346003563502234,
+ "grad_norm": 0.631417989730835,
+ "learning_rate": 3.8202829878664816e-05,
+ "loss": 1.1399,
+ "step": 2134
+ },
+ {
+ "epoch": 2.1356006376793473,
+ "grad_norm": 0.6081330180168152,
+ "learning_rate": 3.812045072538909e-05,
+ "loss": 1.2772,
+ "step": 2135
+ },
+ {
+ "epoch": 2.136600919008471,
+ "grad_norm": 0.5720836520195007,
+ "learning_rate": 3.8038139566547146e-05,
+ "loss": 1.1365,
+ "step": 2136
+ },
+ {
+ "epoch": 2.137601200337595,
+ "grad_norm": 0.7470606565475464,
+ "learning_rate": 3.7955896492584e-05,
+ "loss": 1.2519,
+ "step": 2137
+ },
+ {
+ "epoch": 2.138601481666719,
+ "grad_norm": 0.6497467160224915,
+ "learning_rate": 3.787372159386999e-05,
+ "loss": 1.4294,
+ "step": 2138
+ },
+ {
+ "epoch": 2.1396017629958424,
+ "grad_norm": 0.7447611093521118,
+ "learning_rate": 3.7791614960700395e-05,
+ "loss": 1.2722,
+ "step": 2139
+ },
+ {
+ "epoch": 2.1406020443249663,
+ "grad_norm": 0.7185319066047668,
+ "learning_rate": 3.770957668329562e-05,
+ "loss": 1.436,
+ "step": 2140
+ },
+ {
+ "epoch": 2.14160232565409,
+ "grad_norm": 0.5746670365333557,
+ "learning_rate": 3.7627606851800837e-05,
+ "loss": 1.0138,
+ "step": 2141
+ },
+ {
+ "epoch": 2.142602606983214,
+ "grad_norm": 0.568533182144165,
+ "learning_rate": 3.7545705556286126e-05,
+ "loss": 1.4015,
+ "step": 2142
+ },
+ {
+ "epoch": 2.143602888312338,
+ "grad_norm": 0.5806247591972351,
+ "learning_rate": 3.746387288674613e-05,
+ "loss": 1.2511,
+ "step": 2143
+ },
+ {
+ "epoch": 2.144603169641462,
+ "grad_norm": 0.6733903288841248,
+ "learning_rate": 3.7382108933100234e-05,
+ "loss": 1.4226,
+ "step": 2144
+ },
+ {
+ "epoch": 2.1456034509705857,
+ "grad_norm": 0.6690333485603333,
+ "learning_rate": 3.730041378519216e-05,
+ "loss": 1.4497,
+ "step": 2145
+ },
+ {
+ "epoch": 2.146603732299709,
+ "grad_norm": 0.6218042969703674,
+ "learning_rate": 3.721878753279017e-05,
+ "loss": 1.3474,
+ "step": 2146
+ },
+ {
+ "epoch": 2.147604013628833,
+ "grad_norm": 0.5649903416633606,
+ "learning_rate": 3.713723026558671e-05,
+ "loss": 1.175,
+ "step": 2147
+ },
+ {
+ "epoch": 2.148604294957957,
+ "grad_norm": 0.6123588681221008,
+ "learning_rate": 3.705574207319844e-05,
+ "loss": 1.3606,
+ "step": 2148
+ },
+ {
+ "epoch": 2.149604576287081,
+ "grad_norm": 0.7542588710784912,
+ "learning_rate": 3.697432304516618e-05,
+ "loss": 1.3657,
+ "step": 2149
+ },
+ {
+ "epoch": 2.1506048576162047,
+ "grad_norm": 0.695570170879364,
+ "learning_rate": 3.689297327095472e-05,
+ "loss": 1.2842,
+ "step": 2150
+ },
+ {
+ "epoch": 2.1516051389453286,
+ "grad_norm": 0.6117271780967712,
+ "learning_rate": 3.681169283995279e-05,
+ "loss": 1.2964,
+ "step": 2151
+ },
+ {
+ "epoch": 2.152605420274452,
+ "grad_norm": 0.6640071272850037,
+ "learning_rate": 3.673048184147281e-05,
+ "loss": 1.3073,
+ "step": 2152
+ },
+ {
+ "epoch": 2.153605701603576,
+ "grad_norm": 0.7579565644264221,
+ "learning_rate": 3.664934036475104e-05,
+ "loss": 1.309,
+ "step": 2153
+ },
+ {
+ "epoch": 2.1546059829327,
+ "grad_norm": 0.6235459446907043,
+ "learning_rate": 3.656826849894726e-05,
+ "loss": 1.11,
+ "step": 2154
+ },
+ {
+ "epoch": 2.1556062642618237,
+ "grad_norm": 0.5683732628822327,
+ "learning_rate": 3.648726633314475e-05,
+ "loss": 1.2784,
+ "step": 2155
+ },
+ {
+ "epoch": 2.1566065455909476,
+ "grad_norm": 0.8360922932624817,
+ "learning_rate": 3.640633395635032e-05,
+ "loss": 1.1362,
+ "step": 2156
+ },
+ {
+ "epoch": 2.1576068269200714,
+ "grad_norm": 0.5839766263961792,
+ "learning_rate": 3.632547145749395e-05,
+ "loss": 1.2103,
+ "step": 2157
+ },
+ {
+ "epoch": 2.158607108249195,
+ "grad_norm": 0.6480291485786438,
+ "learning_rate": 3.624467892542895e-05,
+ "loss": 1.0706,
+ "step": 2158
+ },
+ {
+ "epoch": 2.1596073895783188,
+ "grad_norm": 0.5837631821632385,
+ "learning_rate": 3.616395644893166e-05,
+ "loss": 1.2327,
+ "step": 2159
+ },
+ {
+ "epoch": 2.1606076709074427,
+ "grad_norm": 0.6498702168464661,
+ "learning_rate": 3.6083304116701535e-05,
+ "loss": 1.2411,
+ "step": 2160
+ },
+ {
+ "epoch": 2.1616079522365665,
+ "grad_norm": 0.7082285284996033,
+ "learning_rate": 3.600272201736082e-05,
+ "loss": 1.288,
+ "step": 2161
+ },
+ {
+ "epoch": 2.1626082335656904,
+ "grad_norm": 0.7018481492996216,
+ "learning_rate": 3.5922210239454764e-05,
+ "loss": 1.2828,
+ "step": 2162
+ },
+ {
+ "epoch": 2.1636085148948143,
+ "grad_norm": 0.6113139390945435,
+ "learning_rate": 3.5841768871451185e-05,
+ "loss": 1.341,
+ "step": 2163
+ },
+ {
+ "epoch": 2.1646087962239378,
+ "grad_norm": 0.5757367014884949,
+ "learning_rate": 3.57613980017406e-05,
+ "loss": 1.4316,
+ "step": 2164
+ },
+ {
+ "epoch": 2.1656090775530616,
+ "grad_norm": 0.7200785875320435,
+ "learning_rate": 3.568109771863613e-05,
+ "loss": 1.304,
+ "step": 2165
+ },
+ {
+ "epoch": 2.1666093588821855,
+ "grad_norm": 0.5863134264945984,
+ "learning_rate": 3.560086811037316e-05,
+ "loss": 1.2712,
+ "step": 2166
+ },
+ {
+ "epoch": 2.1676096402113094,
+ "grad_norm": 0.6226006746292114,
+ "learning_rate": 3.552070926510962e-05,
+ "loss": 1.2146,
+ "step": 2167
+ },
+ {
+ "epoch": 2.1686099215404333,
+ "grad_norm": 0.6331388354301453,
+ "learning_rate": 3.54406212709255e-05,
+ "loss": 1.3175,
+ "step": 2168
+ },
+ {
+ "epoch": 2.169610202869557,
+ "grad_norm": 0.6935258507728577,
+ "learning_rate": 3.536060421582309e-05,
+ "loss": 1.3687,
+ "step": 2169
+ },
+ {
+ "epoch": 2.170610484198681,
+ "grad_norm": 0.6996686458587646,
+ "learning_rate": 3.52806581877266e-05,
+ "loss": 1.4386,
+ "step": 2170
+ },
+ {
+ "epoch": 2.1716107655278045,
+ "grad_norm": 0.695606529712677,
+ "learning_rate": 3.520078327448232e-05,
+ "loss": 1.1679,
+ "step": 2171
+ },
+ {
+ "epoch": 2.1726110468569284,
+ "grad_norm": 0.6625364422798157,
+ "learning_rate": 3.5120979563858266e-05,
+ "loss": 1.2364,
+ "step": 2172
+ },
+ {
+ "epoch": 2.1736113281860523,
+ "grad_norm": 0.5756232142448425,
+ "learning_rate": 3.5041247143544364e-05,
+ "loss": 1.2565,
+ "step": 2173
+ },
+ {
+ "epoch": 2.174611609515176,
+ "grad_norm": 0.7229663729667664,
+ "learning_rate": 3.496158610115207e-05,
+ "loss": 1.2473,
+ "step": 2174
+ },
+ {
+ "epoch": 2.1756118908443,
+ "grad_norm": 0.6331142783164978,
+ "learning_rate": 3.4881996524214445e-05,
+ "loss": 1.2712,
+ "step": 2175
+ },
+ {
+ "epoch": 2.176612172173424,
+ "grad_norm": 0.7331680655479431,
+ "learning_rate": 3.48024785001861e-05,
+ "loss": 1.3736,
+ "step": 2176
+ },
+ {
+ "epoch": 2.1776124535025474,
+ "grad_norm": 0.745099663734436,
+ "learning_rate": 3.472303211644289e-05,
+ "loss": 1.3546,
+ "step": 2177
+ },
+ {
+ "epoch": 2.1786127348316713,
+ "grad_norm": 0.5532922148704529,
+ "learning_rate": 3.464365746028208e-05,
+ "loss": 1.2695,
+ "step": 2178
+ },
+ {
+ "epoch": 2.179613016160795,
+ "grad_norm": 0.671463131904602,
+ "learning_rate": 3.456435461892203e-05,
+ "loss": 1.1332,
+ "step": 2179
+ },
+ {
+ "epoch": 2.180613297489919,
+ "grad_norm": 0.6440550088882446,
+ "learning_rate": 3.4485123679502274e-05,
+ "loss": 1.2519,
+ "step": 2180
+ },
+ {
+ "epoch": 2.181613578819043,
+ "grad_norm": 0.5422050952911377,
+ "learning_rate": 3.4405964729083254e-05,
+ "loss": 1.3829,
+ "step": 2181
+ },
+ {
+ "epoch": 2.182613860148167,
+ "grad_norm": 0.5920416712760925,
+ "learning_rate": 3.43268778546463e-05,
+ "loss": 1.2442,
+ "step": 2182
+ },
+ {
+ "epoch": 2.1836141414772907,
+ "grad_norm": 0.5842954516410828,
+ "learning_rate": 3.424786314309365e-05,
+ "loss": 1.1633,
+ "step": 2183
+ },
+ {
+ "epoch": 2.184614422806414,
+ "grad_norm": 0.6093557476997375,
+ "learning_rate": 3.416892068124812e-05,
+ "loss": 1.3949,
+ "step": 2184
+ },
+ {
+ "epoch": 2.185614704135538,
+ "grad_norm": 0.6957736015319824,
+ "learning_rate": 3.409005055585327e-05,
+ "loss": 1.3018,
+ "step": 2185
+ },
+ {
+ "epoch": 2.186614985464662,
+ "grad_norm": 0.6803373098373413,
+ "learning_rate": 3.401125285357302e-05,
+ "loss": 1.1555,
+ "step": 2186
+ },
+ {
+ "epoch": 2.187615266793786,
+ "grad_norm": 0.6370871067047119,
+ "learning_rate": 3.393252766099187e-05,
+ "loss": 1.3282,
+ "step": 2187
+ },
+ {
+ "epoch": 2.1886155481229097,
+ "grad_norm": 0.6632930040359497,
+ "learning_rate": 3.3853875064614515e-05,
+ "loss": 1.3476,
+ "step": 2188
+ },
+ {
+ "epoch": 2.1896158294520336,
+ "grad_norm": 0.6875565648078918,
+ "learning_rate": 3.377529515086598e-05,
+ "loss": 1.1416,
+ "step": 2189
+ },
+ {
+ "epoch": 2.190616110781157,
+ "grad_norm": 0.6737555265426636,
+ "learning_rate": 3.369678800609134e-05,
+ "loss": 1.2395,
+ "step": 2190
+ },
+ {
+ "epoch": 2.191616392110281,
+ "grad_norm": 0.6693467497825623,
+ "learning_rate": 3.361835371655578e-05,
+ "loss": 1.2718,
+ "step": 2191
+ },
+ {
+ "epoch": 2.192616673439405,
+ "grad_norm": 0.6017836928367615,
+ "learning_rate": 3.353999236844436e-05,
+ "loss": 1.1916,
+ "step": 2192
+ },
+ {
+ "epoch": 2.1936169547685287,
+ "grad_norm": 0.6228927969932556,
+ "learning_rate": 3.3461704047862054e-05,
+ "loss": 1.3403,
+ "step": 2193
+ },
+ {
+ "epoch": 2.1946172360976526,
+ "grad_norm": 0.5871831178665161,
+ "learning_rate": 3.33834888408336e-05,
+ "loss": 1.1762,
+ "step": 2194
+ },
+ {
+ "epoch": 2.1956175174267765,
+ "grad_norm": 0.6408618688583374,
+ "learning_rate": 3.3305346833303296e-05,
+ "loss": 1.4061,
+ "step": 2195
+ },
+ {
+ "epoch": 2.1966177987559,
+ "grad_norm": 0.6841285824775696,
+ "learning_rate": 3.322727811113516e-05,
+ "loss": 1.4666,
+ "step": 2196
+ },
+ {
+ "epoch": 2.197618080085024,
+ "grad_norm": 0.5706741809844971,
+ "learning_rate": 3.314928276011251e-05,
+ "loss": 1.3368,
+ "step": 2197
+ },
+ {
+ "epoch": 2.1986183614141477,
+ "grad_norm": 0.6349079012870789,
+ "learning_rate": 3.307136086593821e-05,
+ "loss": 1.2458,
+ "step": 2198
+ },
+ {
+ "epoch": 2.1996186427432716,
+ "grad_norm": 0.6701605916023254,
+ "learning_rate": 3.299351251423426e-05,
+ "loss": 1.094,
+ "step": 2199
+ },
+ {
+ "epoch": 2.2006189240723955,
+ "grad_norm": 0.706358015537262,
+ "learning_rate": 3.291573779054199e-05,
+ "loss": 1.4859,
+ "step": 2200
+ },
+ {
+ "epoch": 2.2016192054015193,
+ "grad_norm": 0.6330355405807495,
+ "learning_rate": 3.2838036780321715e-05,
+ "loss": 1.4211,
+ "step": 2201
+ },
+ {
+ "epoch": 2.202619486730643,
+ "grad_norm": 0.6275076866149902,
+ "learning_rate": 3.2760409568952766e-05,
+ "loss": 1.3685,
+ "step": 2202
+ },
+ {
+ "epoch": 2.2036197680597667,
+ "grad_norm": 0.688566267490387,
+ "learning_rate": 3.268285624173347e-05,
+ "loss": 1.3916,
+ "step": 2203
+ },
+ {
+ "epoch": 2.2046200493888906,
+ "grad_norm": 0.569244921207428,
+ "learning_rate": 3.260537688388086e-05,
+ "loss": 1.1975,
+ "step": 2204
+ },
+ {
+ "epoch": 2.2056203307180144,
+ "grad_norm": 0.6800128817558289,
+ "learning_rate": 3.252797158053077e-05,
+ "loss": 1.4201,
+ "step": 2205
+ },
+ {
+ "epoch": 2.2066206120471383,
+ "grad_norm": 0.7519281506538391,
+ "learning_rate": 3.24506404167376e-05,
+ "loss": 1.2746,
+ "step": 2206
+ },
+ {
+ "epoch": 2.207620893376262,
+ "grad_norm": 0.6840569972991943,
+ "learning_rate": 3.2373383477474354e-05,
+ "loss": 1.1698,
+ "step": 2207
+ },
+ {
+ "epoch": 2.208621174705386,
+ "grad_norm": 0.6477528810501099,
+ "learning_rate": 3.229620084763237e-05,
+ "loss": 1.2197,
+ "step": 2208
+ },
+ {
+ "epoch": 2.2096214560345095,
+ "grad_norm": 0.6310058832168579,
+ "learning_rate": 3.221909261202146e-05,
+ "loss": 1.3451,
+ "step": 2209
+ },
+ {
+ "epoch": 2.2106217373636334,
+ "grad_norm": 0.6499499678611755,
+ "learning_rate": 3.214205885536965e-05,
+ "loss": 1.344,
+ "step": 2210
+ },
+ {
+ "epoch": 2.2116220186927573,
+ "grad_norm": 0.6838659644126892,
+ "learning_rate": 3.2065099662323017e-05,
+ "loss": 1.1809,
+ "step": 2211
+ },
+ {
+ "epoch": 2.212622300021881,
+ "grad_norm": 0.5906123518943787,
+ "learning_rate": 3.1988215117445896e-05,
+ "loss": 1.099,
+ "step": 2212
+ },
+ {
+ "epoch": 2.213622581351005,
+ "grad_norm": 0.6088157892227173,
+ "learning_rate": 3.191140530522041e-05,
+ "loss": 1.26,
+ "step": 2213
+ },
+ {
+ "epoch": 2.214622862680129,
+ "grad_norm": 0.6145564317703247,
+ "learning_rate": 3.1834670310046734e-05,
+ "loss": 1.3161,
+ "step": 2214
+ },
+ {
+ "epoch": 2.2156231440092524,
+ "grad_norm": 0.6671096682548523,
+ "learning_rate": 3.1758010216242664e-05,
+ "loss": 1.2084,
+ "step": 2215
+ },
+ {
+ "epoch": 2.2166234253383763,
+ "grad_norm": 0.590133547782898,
+ "learning_rate": 3.168142510804386e-05,
+ "loss": 1.2268,
+ "step": 2216
+ },
+ {
+ "epoch": 2.2176237066675,
+ "grad_norm": 0.6781971454620361,
+ "learning_rate": 3.1604915069603436e-05,
+ "loss": 1.4609,
+ "step": 2217
+ },
+ {
+ "epoch": 2.218623987996624,
+ "grad_norm": 0.6087194085121155,
+ "learning_rate": 3.152848018499215e-05,
+ "loss": 1.2203,
+ "step": 2218
+ },
+ {
+ "epoch": 2.219624269325748,
+ "grad_norm": 0.8006445169448853,
+ "learning_rate": 3.145212053819806e-05,
+ "loss": 1.2055,
+ "step": 2219
+ },
+ {
+ "epoch": 2.220624550654872,
+ "grad_norm": 0.7006067633628845,
+ "learning_rate": 3.137583621312665e-05,
+ "loss": 1.4365,
+ "step": 2220
+ },
+ {
+ "epoch": 2.2216248319839953,
+ "grad_norm": 0.5759941339492798,
+ "learning_rate": 3.1299627293600595e-05,
+ "loss": 1.2277,
+ "step": 2221
+ },
+ {
+ "epoch": 2.222625113313119,
+ "grad_norm": 0.765470564365387,
+ "learning_rate": 3.122349386335964e-05,
+ "loss": 1.3485,
+ "step": 2222
+ },
+ {
+ "epoch": 2.223625394642243,
+ "grad_norm": 0.6199076175689697,
+ "learning_rate": 3.114743600606078e-05,
+ "loss": 1.0602,
+ "step": 2223
+ },
+ {
+ "epoch": 2.224625675971367,
+ "grad_norm": 0.6438413858413696,
+ "learning_rate": 3.107145380527776e-05,
+ "loss": 1.3,
+ "step": 2224
+ },
+ {
+ "epoch": 2.225625957300491,
+ "grad_norm": 0.6275787949562073,
+ "learning_rate": 3.099554734450133e-05,
+ "loss": 1.1412,
+ "step": 2225
+ },
+ {
+ "epoch": 2.2266262386296147,
+ "grad_norm": 0.592475950717926,
+ "learning_rate": 3.091971670713889e-05,
+ "loss": 1.3188,
+ "step": 2226
+ },
+ {
+ "epoch": 2.227626519958738,
+ "grad_norm": 0.6204854249954224,
+ "learning_rate": 3.084396197651468e-05,
+ "loss": 1.3298,
+ "step": 2227
+ },
+ {
+ "epoch": 2.228626801287862,
+ "grad_norm": 0.6952534914016724,
+ "learning_rate": 3.076828323586941e-05,
+ "loss": 1.4366,
+ "step": 2228
+ },
+ {
+ "epoch": 2.229627082616986,
+ "grad_norm": 0.7046433091163635,
+ "learning_rate": 3.06926805683603e-05,
+ "loss": 1.2676,
+ "step": 2229
+ },
+ {
+ "epoch": 2.23062736394611,
+ "grad_norm": 0.7284234166145325,
+ "learning_rate": 3.061715405706106e-05,
+ "loss": 1.4308,
+ "step": 2230
+ },
+ {
+ "epoch": 2.2316276452752337,
+ "grad_norm": 0.7029892206192017,
+ "learning_rate": 3.0541703784961615e-05,
+ "loss": 1.2041,
+ "step": 2231
+ },
+ {
+ "epoch": 2.2326279266043576,
+ "grad_norm": 0.5847492218017578,
+ "learning_rate": 3.0466329834968233e-05,
+ "loss": 1.3313,
+ "step": 2232
+ },
+ {
+ "epoch": 2.2336282079334815,
+ "grad_norm": 0.6810020208358765,
+ "learning_rate": 3.0391032289903188e-05,
+ "loss": 1.3585,
+ "step": 2233
+ },
+ {
+ "epoch": 2.234628489262605,
+ "grad_norm": 0.5531882643699646,
+ "learning_rate": 3.0315811232504922e-05,
+ "loss": 1.1797,
+ "step": 2234
+ },
+ {
+ "epoch": 2.235628770591729,
+ "grad_norm": 0.6164661645889282,
+ "learning_rate": 3.0240666745427713e-05,
+ "loss": 1.049,
+ "step": 2235
+ },
+ {
+ "epoch": 2.2366290519208527,
+ "grad_norm": 0.5976455807685852,
+ "learning_rate": 3.0165598911241832e-05,
+ "loss": 1.0369,
+ "step": 2236
+ },
+ {
+ "epoch": 2.2376293332499766,
+ "grad_norm": 0.7219393849372864,
+ "learning_rate": 3.009060781243319e-05,
+ "loss": 1.3881,
+ "step": 2237
+ },
+ {
+ "epoch": 2.2386296145791005,
+ "grad_norm": 0.6040582656860352,
+ "learning_rate": 3.0015693531403465e-05,
+ "loss": 1.1733,
+ "step": 2238
+ },
+ {
+ "epoch": 2.2396298959082244,
+ "grad_norm": 0.7944032549858093,
+ "learning_rate": 2.994085615046993e-05,
+ "loss": 1.3881,
+ "step": 2239
+ },
+ {
+ "epoch": 2.240630177237348,
+ "grad_norm": 0.7312778234481812,
+ "learning_rate": 2.9866095751865297e-05,
+ "loss": 1.4463,
+ "step": 2240
+ },
+ {
+ "epoch": 2.2416304585664717,
+ "grad_norm": 0.7543103098869324,
+ "learning_rate": 2.979141241773775e-05,
+ "loss": 1.3234,
+ "step": 2241
+ },
+ {
+ "epoch": 2.2426307398955956,
+ "grad_norm": 0.6693204045295715,
+ "learning_rate": 2.971680623015074e-05,
+ "loss": 1.2764,
+ "step": 2242
+ },
+ {
+ "epoch": 2.2436310212247195,
+ "grad_norm": 0.5705234408378601,
+ "learning_rate": 2.9642277271083008e-05,
+ "loss": 1.2161,
+ "step": 2243
+ },
+ {
+ "epoch": 2.2446313025538434,
+ "grad_norm": 0.6513181328773499,
+ "learning_rate": 2.9567825622428358e-05,
+ "loss": 1.3429,
+ "step": 2244
+ },
+ {
+ "epoch": 2.2456315838829672,
+ "grad_norm": 0.684223473072052,
+ "learning_rate": 2.9493451365995737e-05,
+ "loss": 1.2892,
+ "step": 2245
+ },
+ {
+ "epoch": 2.246631865212091,
+ "grad_norm": 0.6515152454376221,
+ "learning_rate": 2.9419154583508978e-05,
+ "loss": 1.3391,
+ "step": 2246
+ },
+ {
+ "epoch": 2.2476321465412146,
+ "grad_norm": 0.7026727199554443,
+ "learning_rate": 2.9344935356606773e-05,
+ "loss": 1.191,
+ "step": 2247
+ },
+ {
+ "epoch": 2.2486324278703385,
+ "grad_norm": 0.6431560516357422,
+ "learning_rate": 2.9270793766842697e-05,
+ "loss": 1.1315,
+ "step": 2248
+ },
+ {
+ "epoch": 2.2496327091994623,
+ "grad_norm": 0.6432946920394897,
+ "learning_rate": 2.9196729895684884e-05,
+ "loss": 1.2303,
+ "step": 2249
+ },
+ {
+ "epoch": 2.2506329905285862,
+ "grad_norm": 0.5639371871948242,
+ "learning_rate": 2.9122743824516195e-05,
+ "loss": 1.2342,
+ "step": 2250
+ },
+ {
+ "epoch": 2.25163327185771,
+ "grad_norm": 0.5914719700813293,
+ "learning_rate": 2.9048835634633887e-05,
+ "loss": 1.1854,
+ "step": 2251
+ },
+ {
+ "epoch": 2.2526335531868336,
+ "grad_norm": 0.6893708109855652,
+ "learning_rate": 2.897500540724972e-05,
+ "loss": 1.3816,
+ "step": 2252
+ },
+ {
+ "epoch": 2.2536338345159574,
+ "grad_norm": 0.5631057620048523,
+ "learning_rate": 2.8901253223489754e-05,
+ "loss": 1.1757,
+ "step": 2253
+ },
+ {
+ "epoch": 2.2546341158450813,
+ "grad_norm": 0.7970195412635803,
+ "learning_rate": 2.8827579164394347e-05,
+ "loss": 1.4732,
+ "step": 2254
+ },
+ {
+ "epoch": 2.255634397174205,
+ "grad_norm": 0.6335131525993347,
+ "learning_rate": 2.875398331091792e-05,
+ "loss": 1.2495,
+ "step": 2255
+ },
+ {
+ "epoch": 2.256634678503329,
+ "grad_norm": 0.7421322464942932,
+ "learning_rate": 2.8680465743928985e-05,
+ "loss": 1.3954,
+ "step": 2256
+ },
+ {
+ "epoch": 2.257634959832453,
+ "grad_norm": 0.6872101426124573,
+ "learning_rate": 2.8607026544210114e-05,
+ "loss": 1.2237,
+ "step": 2257
+ },
+ {
+ "epoch": 2.258635241161577,
+ "grad_norm": 0.7055836915969849,
+ "learning_rate": 2.8533665792457644e-05,
+ "loss": 1.3697,
+ "step": 2258
+ },
+ {
+ "epoch": 2.2596355224907003,
+ "grad_norm": 0.6329922080039978,
+ "learning_rate": 2.8460383569281824e-05,
+ "loss": 1.1614,
+ "step": 2259
+ },
+ {
+ "epoch": 2.260635803819824,
+ "grad_norm": 0.7038082480430603,
+ "learning_rate": 2.8387179955206523e-05,
+ "loss": 1.3674,
+ "step": 2260
+ },
+ {
+ "epoch": 2.261636085148948,
+ "grad_norm": 0.6976034045219421,
+ "learning_rate": 2.831405503066932e-05,
+ "loss": 1.4164,
+ "step": 2261
+ },
+ {
+ "epoch": 2.262636366478072,
+ "grad_norm": 0.6086018681526184,
+ "learning_rate": 2.8241008876021215e-05,
+ "loss": 1.1655,
+ "step": 2262
+ },
+ {
+ "epoch": 2.263636647807196,
+ "grad_norm": 0.6900399327278137,
+ "learning_rate": 2.8168041571526805e-05,
+ "loss": 1.3057,
+ "step": 2263
+ },
+ {
+ "epoch": 2.2646369291363198,
+ "grad_norm": 0.6113269925117493,
+ "learning_rate": 2.8095153197363887e-05,
+ "loss": 1.2436,
+ "step": 2264
+ },
+ {
+ "epoch": 2.265637210465443,
+ "grad_norm": 0.5578724145889282,
+ "learning_rate": 2.8022343833623666e-05,
+ "loss": 1.1389,
+ "step": 2265
+ },
+ {
+ "epoch": 2.266637491794567,
+ "grad_norm": 0.6085034608840942,
+ "learning_rate": 2.7949613560310438e-05,
+ "loss": 1.0843,
+ "step": 2266
+ },
+ {
+ "epoch": 2.267637773123691,
+ "grad_norm": 0.7718783020973206,
+ "learning_rate": 2.787696245734155e-05,
+ "loss": 1.3021,
+ "step": 2267
+ },
+ {
+ "epoch": 2.268638054452815,
+ "grad_norm": 0.7130104303359985,
+ "learning_rate": 2.7804390604547557e-05,
+ "loss": 1.3628,
+ "step": 2268
+ },
+ {
+ "epoch": 2.2696383357819387,
+ "grad_norm": 0.622186005115509,
+ "learning_rate": 2.7731898081671702e-05,
+ "loss": 1.2327,
+ "step": 2269
+ },
+ {
+ "epoch": 2.2706386171110626,
+ "grad_norm": 0.5734781622886658,
+ "learning_rate": 2.765948496837022e-05,
+ "loss": 1.3696,
+ "step": 2270
+ },
+ {
+ "epoch": 2.2716388984401865,
+ "grad_norm": 0.7205266952514648,
+ "learning_rate": 2.758715134421197e-05,
+ "loss": 1.3049,
+ "step": 2271
+ },
+ {
+ "epoch": 2.27263917976931,
+ "grad_norm": 0.7254026532173157,
+ "learning_rate": 2.7514897288678578e-05,
+ "loss": 1.2403,
+ "step": 2272
+ },
+ {
+ "epoch": 2.273639461098434,
+ "grad_norm": 0.6365311741828918,
+ "learning_rate": 2.744272288116416e-05,
+ "loss": 1.2462,
+ "step": 2273
+ },
+ {
+ "epoch": 2.2746397424275577,
+ "grad_norm": 0.6495359539985657,
+ "learning_rate": 2.7370628200975302e-05,
+ "loss": 1.3269,
+ "step": 2274
+ },
+ {
+ "epoch": 2.2756400237566816,
+ "grad_norm": 0.6046035289764404,
+ "learning_rate": 2.729861332733108e-05,
+ "loss": 1.0957,
+ "step": 2275
+ },
+ {
+ "epoch": 2.2766403050858055,
+ "grad_norm": 0.6337087750434875,
+ "learning_rate": 2.7226678339362755e-05,
+ "loss": 1.1258,
+ "step": 2276
+ },
+ {
+ "epoch": 2.2776405864149294,
+ "grad_norm": 0.6914265751838684,
+ "learning_rate": 2.7154823316113932e-05,
+ "loss": 1.1727,
+ "step": 2277
+ },
+ {
+ "epoch": 2.278640867744053,
+ "grad_norm": 0.5778425335884094,
+ "learning_rate": 2.708304833654023e-05,
+ "loss": 1.2652,
+ "step": 2278
+ },
+ {
+ "epoch": 2.2796411490731767,
+ "grad_norm": 0.6359294056892395,
+ "learning_rate": 2.7011353479509426e-05,
+ "loss": 1.1769,
+ "step": 2279
+ },
+ {
+ "epoch": 2.2806414304023006,
+ "grad_norm": 0.672590434551239,
+ "learning_rate": 2.693973882380114e-05,
+ "loss": 1.5127,
+ "step": 2280
+ },
+ {
+ "epoch": 2.2816417117314245,
+ "grad_norm": 0.6255220770835876,
+ "learning_rate": 2.686820444810696e-05,
+ "loss": 1.0315,
+ "step": 2281
+ },
+ {
+ "epoch": 2.2826419930605484,
+ "grad_norm": 0.6508489847183228,
+ "learning_rate": 2.679675043103026e-05,
+ "loss": 1.3174,
+ "step": 2282
+ },
+ {
+ "epoch": 2.2836422743896723,
+ "grad_norm": 0.6855418086051941,
+ "learning_rate": 2.6725376851086025e-05,
+ "loss": 1.4684,
+ "step": 2283
+ },
+ {
+ "epoch": 2.284642555718796,
+ "grad_norm": 0.5959590673446655,
+ "learning_rate": 2.6654083786700955e-05,
+ "loss": 1.081,
+ "step": 2284
+ },
+ {
+ "epoch": 2.2856428370479196,
+ "grad_norm": 0.6854454874992371,
+ "learning_rate": 2.6582871316213198e-05,
+ "loss": 1.3547,
+ "step": 2285
+ },
+ {
+ "epoch": 2.2866431183770435,
+ "grad_norm": 0.6475023031234741,
+ "learning_rate": 2.6511739517872426e-05,
+ "loss": 1.1388,
+ "step": 2286
+ },
+ {
+ "epoch": 2.2876433997061674,
+ "grad_norm": 0.6125039458274841,
+ "learning_rate": 2.644068846983956e-05,
+ "loss": 1.0777,
+ "step": 2287
+ },
+ {
+ "epoch": 2.2886436810352913,
+ "grad_norm": 0.641226589679718,
+ "learning_rate": 2.6369718250186914e-05,
+ "loss": 1.3094,
+ "step": 2288
+ },
+ {
+ "epoch": 2.289643962364415,
+ "grad_norm": 0.6123669743537903,
+ "learning_rate": 2.6298828936897867e-05,
+ "loss": 1.0834,
+ "step": 2289
+ },
+ {
+ "epoch": 2.2906442436935386,
+ "grad_norm": 0.7510077357292175,
+ "learning_rate": 2.622802060786702e-05,
+ "loss": 1.3612,
+ "step": 2290
+ },
+ {
+ "epoch": 2.2916445250226625,
+ "grad_norm": 0.6886245012283325,
+ "learning_rate": 2.6157293340899857e-05,
+ "loss": 1.2483,
+ "step": 2291
+ },
+ {
+ "epoch": 2.2926448063517864,
+ "grad_norm": 0.5994114279747009,
+ "learning_rate": 2.60866472137129e-05,
+ "loss": 1.2326,
+ "step": 2292
+ },
+ {
+ "epoch": 2.2936450876809102,
+ "grad_norm": 0.6197491884231567,
+ "learning_rate": 2.6016082303933454e-05,
+ "loss": 1.1057,
+ "step": 2293
+ },
+ {
+ "epoch": 2.294645369010034,
+ "grad_norm": 0.7268143892288208,
+ "learning_rate": 2.594559868909956e-05,
+ "loss": 1.4143,
+ "step": 2294
+ },
+ {
+ "epoch": 2.295645650339158,
+ "grad_norm": 0.6396143436431885,
+ "learning_rate": 2.587519644666001e-05,
+ "loss": 1.1143,
+ "step": 2295
+ },
+ {
+ "epoch": 2.296645931668282,
+ "grad_norm": 0.6493250727653503,
+ "learning_rate": 2.580487565397406e-05,
+ "loss": 1.2665,
+ "step": 2296
+ },
+ {
+ "epoch": 2.2976462129974053,
+ "grad_norm": 0.6070438027381897,
+ "learning_rate": 2.573463638831166e-05,
+ "loss": 1.1437,
+ "step": 2297
+ },
+ {
+ "epoch": 2.2986464943265292,
+ "grad_norm": 0.6047723889350891,
+ "learning_rate": 2.566447872685298e-05,
+ "loss": 1.2397,
+ "step": 2298
+ },
+ {
+ "epoch": 2.299646775655653,
+ "grad_norm": 0.6956603527069092,
+ "learning_rate": 2.559440274668864e-05,
+ "loss": 1.4726,
+ "step": 2299
+ },
+ {
+ "epoch": 2.300647056984777,
+ "grad_norm": 0.724605143070221,
+ "learning_rate": 2.5524408524819453e-05,
+ "loss": 1.2286,
+ "step": 2300
+ },
+ {
+ "epoch": 2.301647338313901,
+ "grad_norm": 0.5969868898391724,
+ "learning_rate": 2.545449613815639e-05,
+ "loss": 1.1894,
+ "step": 2301
+ },
+ {
+ "epoch": 2.302647619643025,
+ "grad_norm": 0.5893451571464539,
+ "learning_rate": 2.5384665663520558e-05,
+ "loss": 1.3669,
+ "step": 2302
+ },
+ {
+ "epoch": 2.303647900972148,
+ "grad_norm": 0.6300607323646545,
+ "learning_rate": 2.5314917177642972e-05,
+ "loss": 1.4301,
+ "step": 2303
+ },
+ {
+ "epoch": 2.304648182301272,
+ "grad_norm": 0.5928812026977539,
+ "learning_rate": 2.5245250757164663e-05,
+ "loss": 1.5703,
+ "step": 2304
+ },
+ {
+ "epoch": 2.305648463630396,
+ "grad_norm": 0.5831782221794128,
+ "learning_rate": 2.5175666478636374e-05,
+ "loss": 1.3643,
+ "step": 2305
+ },
+ {
+ "epoch": 2.30664874495952,
+ "grad_norm": 0.7637529373168945,
+ "learning_rate": 2.5106164418518686e-05,
+ "loss": 1.4751,
+ "step": 2306
+ },
+ {
+ "epoch": 2.3076490262886438,
+ "grad_norm": 0.6256185173988342,
+ "learning_rate": 2.5036744653181753e-05,
+ "loss": 1.4159,
+ "step": 2307
+ },
+ {
+ "epoch": 2.3086493076177677,
+ "grad_norm": 0.7415730953216553,
+ "learning_rate": 2.4967407258905385e-05,
+ "loss": 1.2233,
+ "step": 2308
+ },
+ {
+ "epoch": 2.3096495889468915,
+ "grad_norm": 0.639265239238739,
+ "learning_rate": 2.48981523118788e-05,
+ "loss": 1.3055,
+ "step": 2309
+ },
+ {
+ "epoch": 2.310649870276015,
+ "grad_norm": 0.6294757127761841,
+ "learning_rate": 2.4828979888200698e-05,
+ "loss": 1.4187,
+ "step": 2310
+ },
+ {
+ "epoch": 2.311650151605139,
+ "grad_norm": 0.6178464293479919,
+ "learning_rate": 2.475989006387901e-05,
+ "loss": 1.3641,
+ "step": 2311
+ },
+ {
+ "epoch": 2.3126504329342628,
+ "grad_norm": 0.593035876750946,
+ "learning_rate": 2.4690882914831004e-05,
+ "loss": 1.248,
+ "step": 2312
+ },
+ {
+ "epoch": 2.3136507142633866,
+ "grad_norm": 0.6418329477310181,
+ "learning_rate": 2.462195851688306e-05,
+ "loss": 1.2338,
+ "step": 2313
+ },
+ {
+ "epoch": 2.3146509955925105,
+ "grad_norm": 0.579680323600769,
+ "learning_rate": 2.4553116945770583e-05,
+ "loss": 1.2199,
+ "step": 2314
+ },
+ {
+ "epoch": 2.315651276921634,
+ "grad_norm": 0.7126612663269043,
+ "learning_rate": 2.4484358277138065e-05,
+ "loss": 1.3096,
+ "step": 2315
+ },
+ {
+ "epoch": 2.316651558250758,
+ "grad_norm": 0.7200229167938232,
+ "learning_rate": 2.441568258653879e-05,
+ "loss": 1.3489,
+ "step": 2316
+ },
+ {
+ "epoch": 2.3176518395798817,
+ "grad_norm": 0.654485821723938,
+ "learning_rate": 2.4347089949434988e-05,
+ "loss": 1.2585,
+ "step": 2317
+ },
+ {
+ "epoch": 2.3186521209090056,
+ "grad_norm": 0.6139536499977112,
+ "learning_rate": 2.4278580441197484e-05,
+ "loss": 1.3096,
+ "step": 2318
+ },
+ {
+ "epoch": 2.3196524022381295,
+ "grad_norm": 0.6222895383834839,
+ "learning_rate": 2.421015413710591e-05,
+ "loss": 1.2969,
+ "step": 2319
+ },
+ {
+ "epoch": 2.3206526835672534,
+ "grad_norm": 0.73511803150177,
+ "learning_rate": 2.4141811112348377e-05,
+ "loss": 1.3704,
+ "step": 2320
+ },
+ {
+ "epoch": 2.3216529648963773,
+ "grad_norm": 0.5469374060630798,
+ "learning_rate": 2.407355144202147e-05,
+ "loss": 1.267,
+ "step": 2321
+ },
+ {
+ "epoch": 2.3226532462255007,
+ "grad_norm": 0.7390980124473572,
+ "learning_rate": 2.4005375201130274e-05,
+ "loss": 1.1809,
+ "step": 2322
+ },
+ {
+ "epoch": 2.3236535275546246,
+ "grad_norm": 0.6703569889068604,
+ "learning_rate": 2.3937282464588108e-05,
+ "loss": 1.4966,
+ "step": 2323
+ },
+ {
+ "epoch": 2.3246538088837485,
+ "grad_norm": 0.6065950989723206,
+ "learning_rate": 2.3869273307216612e-05,
+ "loss": 1.3386,
+ "step": 2324
+ },
+ {
+ "epoch": 2.3256540902128724,
+ "grad_norm": 0.6592161655426025,
+ "learning_rate": 2.3801347803745512e-05,
+ "loss": 1.4153,
+ "step": 2325
+ },
+ {
+ "epoch": 2.3266543715419963,
+ "grad_norm": 0.7319929003715515,
+ "learning_rate": 2.3733506028812658e-05,
+ "loss": 1.3225,
+ "step": 2326
+ },
+ {
+ "epoch": 2.32765465287112,
+ "grad_norm": 0.8160943984985352,
+ "learning_rate": 2.3665748056963956e-05,
+ "loss": 1.2414,
+ "step": 2327
+ },
+ {
+ "epoch": 2.3286549342002436,
+ "grad_norm": 0.7314315438270569,
+ "learning_rate": 2.3598073962653066e-05,
+ "loss": 1.3169,
+ "step": 2328
+ },
+ {
+ "epoch": 2.3296552155293675,
+ "grad_norm": 0.5869148969650269,
+ "learning_rate": 2.3530483820241656e-05,
+ "loss": 1.1076,
+ "step": 2329
+ },
+ {
+ "epoch": 2.3306554968584914,
+ "grad_norm": 0.6157956123352051,
+ "learning_rate": 2.3462977703999023e-05,
+ "loss": 1.3961,
+ "step": 2330
+ },
+ {
+ "epoch": 2.3316557781876153,
+ "grad_norm": 0.6022456288337708,
+ "learning_rate": 2.339555568810221e-05,
+ "loss": 1.0627,
+ "step": 2331
+ },
+ {
+ "epoch": 2.332656059516739,
+ "grad_norm": 0.5729643702507019,
+ "learning_rate": 2.332821784663578e-05,
+ "loss": 1.0171,
+ "step": 2332
+ },
+ {
+ "epoch": 2.333656340845863,
+ "grad_norm": 0.6669082641601562,
+ "learning_rate": 2.3260964253591898e-05,
+ "loss": 1.3516,
+ "step": 2333
+ },
+ {
+ "epoch": 2.334656622174987,
+ "grad_norm": 0.6814001798629761,
+ "learning_rate": 2.3193794982870044e-05,
+ "loss": 1.181,
+ "step": 2334
+ },
+ {
+ "epoch": 2.3356569035041104,
+ "grad_norm": 0.6704903841018677,
+ "learning_rate": 2.312671010827715e-05,
+ "loss": 1.2422,
+ "step": 2335
+ },
+ {
+ "epoch": 2.3366571848332343,
+ "grad_norm": 0.5573928952217102,
+ "learning_rate": 2.30597097035273e-05,
+ "loss": 1.2298,
+ "step": 2336
+ },
+ {
+ "epoch": 2.337657466162358,
+ "grad_norm": 0.6623947024345398,
+ "learning_rate": 2.29927938422419e-05,
+ "loss": 1.181,
+ "step": 2337
+ },
+ {
+ "epoch": 2.338657747491482,
+ "grad_norm": 0.6784799695014954,
+ "learning_rate": 2.2925962597949302e-05,
+ "loss": 1.4118,
+ "step": 2338
+ },
+ {
+ "epoch": 2.339658028820606,
+ "grad_norm": 0.6137337684631348,
+ "learning_rate": 2.285921604408502e-05,
+ "loss": 1.2455,
+ "step": 2339
+ },
+ {
+ "epoch": 2.34065831014973,
+ "grad_norm": 0.5788987278938293,
+ "learning_rate": 2.2792554253991415e-05,
+ "loss": 1.1512,
+ "step": 2340
+ },
+ {
+ "epoch": 2.3416585914788532,
+ "grad_norm": 0.6471617817878723,
+ "learning_rate": 2.272597730091769e-05,
+ "loss": 1.1491,
+ "step": 2341
+ },
+ {
+ "epoch": 2.342658872807977,
+ "grad_norm": 0.7257328629493713,
+ "learning_rate": 2.2659485258019976e-05,
+ "loss": 1.2717,
+ "step": 2342
+ },
+ {
+ "epoch": 2.343659154137101,
+ "grad_norm": 0.678607702255249,
+ "learning_rate": 2.259307819836093e-05,
+ "loss": 1.3263,
+ "step": 2343
+ },
+ {
+ "epoch": 2.344659435466225,
+ "grad_norm": 0.6241843104362488,
+ "learning_rate": 2.252675619490996e-05,
+ "loss": 1.3422,
+ "step": 2344
+ },
+ {
+ "epoch": 2.345659716795349,
+ "grad_norm": 0.6813640594482422,
+ "learning_rate": 2.2460519320542883e-05,
+ "loss": 1.2869,
+ "step": 2345
+ },
+ {
+ "epoch": 2.3466599981244727,
+ "grad_norm": 0.6423537731170654,
+ "learning_rate": 2.2394367648042102e-05,
+ "loss": 1.2624,
+ "step": 2346
+ },
+ {
+ "epoch": 2.3476602794535966,
+ "grad_norm": 0.6376233100891113,
+ "learning_rate": 2.2328301250096327e-05,
+ "loss": 1.0929,
+ "step": 2347
+ },
+ {
+ "epoch": 2.34866056078272,
+ "grad_norm": 0.6261239647865295,
+ "learning_rate": 2.2262320199300557e-05,
+ "loss": 1.1157,
+ "step": 2348
+ },
+ {
+ "epoch": 2.349660842111844,
+ "grad_norm": 0.6539388298988342,
+ "learning_rate": 2.2196424568156073e-05,
+ "loss": 1.2641,
+ "step": 2349
+ },
+ {
+ "epoch": 2.350661123440968,
+ "grad_norm": 0.6082950234413147,
+ "learning_rate": 2.2130614429070207e-05,
+ "loss": 1.269,
+ "step": 2350
+ },
+ {
+ "epoch": 2.3516614047700917,
+ "grad_norm": 0.6362713575363159,
+ "learning_rate": 2.206488985435645e-05,
+ "loss": 1.1818,
+ "step": 2351
+ },
+ {
+ "epoch": 2.3526616860992156,
+ "grad_norm": 0.6356920599937439,
+ "learning_rate": 2.199925091623418e-05,
+ "loss": 1.3691,
+ "step": 2352
+ },
+ {
+ "epoch": 2.353661967428339,
+ "grad_norm": 0.7058550715446472,
+ "learning_rate": 2.193369768682877e-05,
+ "loss": 1.2488,
+ "step": 2353
+ },
+ {
+ "epoch": 2.354662248757463,
+ "grad_norm": 0.6002040505409241,
+ "learning_rate": 2.1868230238171293e-05,
+ "loss": 1.2003,
+ "step": 2354
+ },
+ {
+ "epoch": 2.3556625300865868,
+ "grad_norm": 0.5998533368110657,
+ "learning_rate": 2.1802848642198692e-05,
+ "loss": 1.317,
+ "step": 2355
+ },
+ {
+ "epoch": 2.3566628114157107,
+ "grad_norm": 0.7087720036506653,
+ "learning_rate": 2.1737552970753526e-05,
+ "loss": 1.3822,
+ "step": 2356
+ },
+ {
+ "epoch": 2.3576630927448345,
+ "grad_norm": 0.6583616733551025,
+ "learning_rate": 2.1672343295583873e-05,
+ "loss": 1.2539,
+ "step": 2357
+ },
+ {
+ "epoch": 2.3586633740739584,
+ "grad_norm": 0.7159150242805481,
+ "learning_rate": 2.160721968834344e-05,
+ "loss": 1.3049,
+ "step": 2358
+ },
+ {
+ "epoch": 2.3596636554030823,
+ "grad_norm": 0.7452221512794495,
+ "learning_rate": 2.154218222059122e-05,
+ "loss": 1.2626,
+ "step": 2359
+ },
+ {
+ "epoch": 2.3606639367322058,
+ "grad_norm": 0.6934370398521423,
+ "learning_rate": 2.1477230963791706e-05,
+ "loss": 1.2278,
+ "step": 2360
+ },
+ {
+ "epoch": 2.3616642180613296,
+ "grad_norm": 0.6518145799636841,
+ "learning_rate": 2.141236598931451e-05,
+ "loss": 1.3014,
+ "step": 2361
+ },
+ {
+ "epoch": 2.3626644993904535,
+ "grad_norm": 0.712419867515564,
+ "learning_rate": 2.1347587368434575e-05,
+ "loss": 1.2546,
+ "step": 2362
+ },
+ {
+ "epoch": 2.3636647807195774,
+ "grad_norm": 0.6252943873405457,
+ "learning_rate": 2.1282895172331817e-05,
+ "loss": 1.3629,
+ "step": 2363
+ },
+ {
+ "epoch": 2.3646650620487013,
+ "grad_norm": 0.7917761206626892,
+ "learning_rate": 2.1218289472091336e-05,
+ "loss": 1.0265,
+ "step": 2364
+ },
+ {
+ "epoch": 2.365665343377825,
+ "grad_norm": 0.6771757006645203,
+ "learning_rate": 2.115377033870305e-05,
+ "loss": 1.141,
+ "step": 2365
+ },
+ {
+ "epoch": 2.3666656247069486,
+ "grad_norm": 0.642998993396759,
+ "learning_rate": 2.1089337843061863e-05,
+ "loss": 1.4206,
+ "step": 2366
+ },
+ {
+ "epoch": 2.3676659060360725,
+ "grad_norm": 0.5679053664207458,
+ "learning_rate": 2.102499205596743e-05,
+ "loss": 1.2185,
+ "step": 2367
+ },
+ {
+ "epoch": 2.3686661873651964,
+ "grad_norm": 0.5789163708686829,
+ "learning_rate": 2.0960733048124083e-05,
+ "loss": 1.0651,
+ "step": 2368
+ },
+ {
+ "epoch": 2.3696664686943203,
+ "grad_norm": 0.621918261051178,
+ "learning_rate": 2.0896560890140913e-05,
+ "loss": 1.3253,
+ "step": 2369
+ },
+ {
+ "epoch": 2.370666750023444,
+ "grad_norm": 0.6738060712814331,
+ "learning_rate": 2.0832475652531447e-05,
+ "loss": 1.2178,
+ "step": 2370
+ },
+ {
+ "epoch": 2.371667031352568,
+ "grad_norm": 0.6864463686943054,
+ "learning_rate": 2.076847740571387e-05,
+ "loss": 1.0683,
+ "step": 2371
+ },
+ {
+ "epoch": 2.372667312681692,
+ "grad_norm": 0.683365523815155,
+ "learning_rate": 2.070456622001059e-05,
+ "loss": 1.2512,
+ "step": 2372
+ },
+ {
+ "epoch": 2.3736675940108154,
+ "grad_norm": 0.7128761410713196,
+ "learning_rate": 2.064074216564852e-05,
+ "loss": 1.3049,
+ "step": 2373
+ },
+ {
+ "epoch": 2.3746678753399393,
+ "grad_norm": 0.7912024259567261,
+ "learning_rate": 2.0577005312758703e-05,
+ "loss": 1.4466,
+ "step": 2374
+ },
+ {
+ "epoch": 2.375668156669063,
+ "grad_norm": 0.642785370349884,
+ "learning_rate": 2.0513355731376395e-05,
+ "loss": 1.349,
+ "step": 2375
+ },
+ {
+ "epoch": 2.376668437998187,
+ "grad_norm": 0.6518558859825134,
+ "learning_rate": 2.0449793491441028e-05,
+ "loss": 1.3863,
+ "step": 2376
+ },
+ {
+ "epoch": 2.377668719327311,
+ "grad_norm": 0.5721689462661743,
+ "learning_rate": 2.0386318662795957e-05,
+ "loss": 1.3933,
+ "step": 2377
+ },
+ {
+ "epoch": 2.3786690006564344,
+ "grad_norm": 0.6657408475875854,
+ "learning_rate": 2.0322931315188586e-05,
+ "loss": 1.1653,
+ "step": 2378
+ },
+ {
+ "epoch": 2.3796692819855583,
+ "grad_norm": 0.6022714972496033,
+ "learning_rate": 2.0259631518270105e-05,
+ "loss": 1.2296,
+ "step": 2379
+ },
+ {
+ "epoch": 2.380669563314682,
+ "grad_norm": 0.7442004680633545,
+ "learning_rate": 2.0196419341595595e-05,
+ "loss": 1.2062,
+ "step": 2380
+ },
+ {
+ "epoch": 2.381669844643806,
+ "grad_norm": 0.5507898926734924,
+ "learning_rate": 2.013329485462374e-05,
+ "loss": 1.1901,
+ "step": 2381
+ },
+ {
+ "epoch": 2.38267012597293,
+ "grad_norm": 0.6922757625579834,
+ "learning_rate": 2.0070258126717e-05,
+ "loss": 1.3193,
+ "step": 2382
+ },
+ {
+ "epoch": 2.383670407302054,
+ "grad_norm": 0.5862204432487488,
+ "learning_rate": 2.000730922714128e-05,
+ "loss": 1.3149,
+ "step": 2383
+ },
+ {
+ "epoch": 2.3846706886311777,
+ "grad_norm": 0.6098889708518982,
+ "learning_rate": 1.9944448225066093e-05,
+ "loss": 1.2777,
+ "step": 2384
+ },
+ {
+ "epoch": 2.385670969960301,
+ "grad_norm": 0.6173763871192932,
+ "learning_rate": 1.9881675189564254e-05,
+ "loss": 1.2043,
+ "step": 2385
+ },
+ {
+ "epoch": 2.386671251289425,
+ "grad_norm": 0.6959353089332581,
+ "learning_rate": 1.981899018961202e-05,
+ "loss": 1.0934,
+ "step": 2386
+ },
+ {
+ "epoch": 2.387671532618549,
+ "grad_norm": 0.6258510947227478,
+ "learning_rate": 1.975639329408887e-05,
+ "loss": 1.2021,
+ "step": 2387
+ },
+ {
+ "epoch": 2.388671813947673,
+ "grad_norm": 0.6541923880577087,
+ "learning_rate": 1.9693884571777432e-05,
+ "loss": 1.1109,
+ "step": 2388
+ },
+ {
+ "epoch": 2.3896720952767967,
+ "grad_norm": 0.6073365807533264,
+ "learning_rate": 1.963146409136354e-05,
+ "loss": 1.1153,
+ "step": 2389
+ },
+ {
+ "epoch": 2.3906723766059206,
+ "grad_norm": 0.5515630841255188,
+ "learning_rate": 1.9569131921435956e-05,
+ "loss": 1.1823,
+ "step": 2390
+ },
+ {
+ "epoch": 2.391672657935044,
+ "grad_norm": 0.7065924406051636,
+ "learning_rate": 1.950688813048652e-05,
+ "loss": 1.1865,
+ "step": 2391
+ },
+ {
+ "epoch": 2.392672939264168,
+ "grad_norm": 0.585737407207489,
+ "learning_rate": 1.944473278690986e-05,
+ "loss": 1.2334,
+ "step": 2392
+ },
+ {
+ "epoch": 2.393673220593292,
+ "grad_norm": 0.6631129384040833,
+ "learning_rate": 1.9382665959003477e-05,
+ "loss": 1.2752,
+ "step": 2393
+ },
+ {
+ "epoch": 2.3946735019224157,
+ "grad_norm": 0.711269199848175,
+ "learning_rate": 1.93206877149676e-05,
+ "loss": 1.1615,
+ "step": 2394
+ },
+ {
+ "epoch": 2.3956737832515396,
+ "grad_norm": 0.640571653842926,
+ "learning_rate": 1.9258798122905064e-05,
+ "loss": 1.2068,
+ "step": 2395
+ },
+ {
+ "epoch": 2.3966740645806635,
+ "grad_norm": 0.6581786274909973,
+ "learning_rate": 1.9196997250821392e-05,
+ "loss": 1.2603,
+ "step": 2396
+ },
+ {
+ "epoch": 2.3976743459097873,
+ "grad_norm": 0.7250068783760071,
+ "learning_rate": 1.913528516662452e-05,
+ "loss": 1.4225,
+ "step": 2397
+ },
+ {
+ "epoch": 2.398674627238911,
+ "grad_norm": 0.5822978615760803,
+ "learning_rate": 1.907366193812491e-05,
+ "loss": 1.0978,
+ "step": 2398
+ },
+ {
+ "epoch": 2.3996749085680347,
+ "grad_norm": 0.7264160513877869,
+ "learning_rate": 1.9012127633035305e-05,
+ "loss": 1.2794,
+ "step": 2399
+ },
+ {
+ "epoch": 2.4006751898971586,
+ "grad_norm": 0.6197434067726135,
+ "learning_rate": 1.895068231897079e-05,
+ "loss": 1.2566,
+ "step": 2400
+ },
+ {
+ "epoch": 2.4016754712262824,
+ "grad_norm": 0.6504030823707581,
+ "learning_rate": 1.8889326063448697e-05,
+ "loss": 1.058,
+ "step": 2401
+ },
+ {
+ "epoch": 2.4026757525554063,
+ "grad_norm": 0.6449025869369507,
+ "learning_rate": 1.8828058933888392e-05,
+ "loss": 1.4013,
+ "step": 2402
+ },
+ {
+ "epoch": 2.40367603388453,
+ "grad_norm": 0.5803359746932983,
+ "learning_rate": 1.8766880997611424e-05,
+ "loss": 1.3732,
+ "step": 2403
+ },
+ {
+ "epoch": 2.4046763152136537,
+ "grad_norm": 0.6420122385025024,
+ "learning_rate": 1.870579232184122e-05,
+ "loss": 1.2435,
+ "step": 2404
+ },
+ {
+ "epoch": 2.4056765965427775,
+ "grad_norm": 0.6524025797843933,
+ "learning_rate": 1.864479297370325e-05,
+ "loss": 1.0731,
+ "step": 2405
+ },
+ {
+ "epoch": 2.4066768778719014,
+ "grad_norm": 0.6313955187797546,
+ "learning_rate": 1.8583883020224724e-05,
+ "loss": 1.315,
+ "step": 2406
+ },
+ {
+ "epoch": 2.4076771592010253,
+ "grad_norm": 0.6714106202125549,
+ "learning_rate": 1.8523062528334688e-05,
+ "loss": 1.207,
+ "step": 2407
+ },
+ {
+ "epoch": 2.408677440530149,
+ "grad_norm": 0.7873541116714478,
+ "learning_rate": 1.8462331564863832e-05,
+ "loss": 1.4697,
+ "step": 2408
+ },
+ {
+ "epoch": 2.409677721859273,
+ "grad_norm": 0.6541167497634888,
+ "learning_rate": 1.8401690196544552e-05,
+ "loss": 1.4379,
+ "step": 2409
+ },
+ {
+ "epoch": 2.410678003188397,
+ "grad_norm": 0.6141494512557983,
+ "learning_rate": 1.834113849001069e-05,
+ "loss": 1.2779,
+ "step": 2410
+ },
+ {
+ "epoch": 2.4116782845175204,
+ "grad_norm": 0.7914630174636841,
+ "learning_rate": 1.8280676511797666e-05,
+ "loss": 1.3245,
+ "step": 2411
+ },
+ {
+ "epoch": 2.4126785658466443,
+ "grad_norm": 0.6244723200798035,
+ "learning_rate": 1.8220304328342252e-05,
+ "loss": 1.3419,
+ "step": 2412
+ },
+ {
+ "epoch": 2.413678847175768,
+ "grad_norm": 0.6117607951164246,
+ "learning_rate": 1.8160022005982515e-05,
+ "loss": 1.2608,
+ "step": 2413
+ },
+ {
+ "epoch": 2.414679128504892,
+ "grad_norm": 0.5759628415107727,
+ "learning_rate": 1.8099829610957863e-05,
+ "loss": 1.1133,
+ "step": 2414
+ },
+ {
+ "epoch": 2.415679409834016,
+ "grad_norm": 0.6035516858100891,
+ "learning_rate": 1.8039727209408842e-05,
+ "loss": 1.2471,
+ "step": 2415
+ },
+ {
+ "epoch": 2.4166796911631394,
+ "grad_norm": 0.6468955278396606,
+ "learning_rate": 1.7979714867377152e-05,
+ "loss": 1.219,
+ "step": 2416
+ },
+ {
+ "epoch": 2.4176799724922633,
+ "grad_norm": 0.7172923684120178,
+ "learning_rate": 1.7919792650805455e-05,
+ "loss": 1.2992,
+ "step": 2417
+ },
+ {
+ "epoch": 2.418680253821387,
+ "grad_norm": 0.5648188591003418,
+ "learning_rate": 1.7859960625537476e-05,
+ "loss": 1.3393,
+ "step": 2418
+ },
+ {
+ "epoch": 2.419680535150511,
+ "grad_norm": 0.5623717904090881,
+ "learning_rate": 1.7800218857317742e-05,
+ "loss": 1.3284,
+ "step": 2419
+ },
+ {
+ "epoch": 2.420680816479635,
+ "grad_norm": 0.5881792306900024,
+ "learning_rate": 1.774056741179171e-05,
+ "loss": 1.0625,
+ "step": 2420
+ },
+ {
+ "epoch": 2.421681097808759,
+ "grad_norm": 0.6556890606880188,
+ "learning_rate": 1.7681006354505493e-05,
+ "loss": 1.1079,
+ "step": 2421
+ },
+ {
+ "epoch": 2.4226813791378827,
+ "grad_norm": 0.7625473737716675,
+ "learning_rate": 1.7621535750905905e-05,
+ "loss": 1.3825,
+ "step": 2422
+ },
+ {
+ "epoch": 2.423681660467006,
+ "grad_norm": 0.5806693434715271,
+ "learning_rate": 1.756215566634043e-05,
+ "loss": 1.2253,
+ "step": 2423
+ },
+ {
+ "epoch": 2.42468194179613,
+ "grad_norm": 0.6054913997650146,
+ "learning_rate": 1.7502866166056986e-05,
+ "loss": 1.2402,
+ "step": 2424
+ },
+ {
+ "epoch": 2.425682223125254,
+ "grad_norm": 0.6507579684257507,
+ "learning_rate": 1.744366731520408e-05,
+ "loss": 1.5156,
+ "step": 2425
+ },
+ {
+ "epoch": 2.426682504454378,
+ "grad_norm": 0.5348602533340454,
+ "learning_rate": 1.7384559178830472e-05,
+ "loss": 1.2455,
+ "step": 2426
+ },
+ {
+ "epoch": 2.4276827857835017,
+ "grad_norm": 0.7310481071472168,
+ "learning_rate": 1.7325541821885384e-05,
+ "loss": 1.3409,
+ "step": 2427
+ },
+ {
+ "epoch": 2.4286830671126256,
+ "grad_norm": 0.6602012515068054,
+ "learning_rate": 1.726661530921815e-05,
+ "loss": 1.3951,
+ "step": 2428
+ },
+ {
+ "epoch": 2.429683348441749,
+ "grad_norm": 0.6798003911972046,
+ "learning_rate": 1.7207779705578375e-05,
+ "loss": 1.4299,
+ "step": 2429
+ },
+ {
+ "epoch": 2.430683629770873,
+ "grad_norm": 0.625441312789917,
+ "learning_rate": 1.7149035075615794e-05,
+ "loss": 1.1568,
+ "step": 2430
+ },
+ {
+ "epoch": 2.431683911099997,
+ "grad_norm": 0.5963711142539978,
+ "learning_rate": 1.709038148388007e-05,
+ "loss": 1.1604,
+ "step": 2431
+ },
+ {
+ "epoch": 2.4326841924291207,
+ "grad_norm": 0.6797822117805481,
+ "learning_rate": 1.7031818994820926e-05,
+ "loss": 1.4374,
+ "step": 2432
+ },
+ {
+ "epoch": 2.4336844737582446,
+ "grad_norm": 0.6096747517585754,
+ "learning_rate": 1.697334767278792e-05,
+ "loss": 1.2968,
+ "step": 2433
+ },
+ {
+ "epoch": 2.4346847550873685,
+ "grad_norm": 0.6281675696372986,
+ "learning_rate": 1.6914967582030493e-05,
+ "loss": 1.1493,
+ "step": 2434
+ },
+ {
+ "epoch": 2.4356850364164924,
+ "grad_norm": 0.6768115162849426,
+ "learning_rate": 1.6856678786697778e-05,
+ "loss": 1.1602,
+ "step": 2435
+ },
+ {
+ "epoch": 2.436685317745616,
+ "grad_norm": 1.0345252752304077,
+ "learning_rate": 1.6798481350838648e-05,
+ "loss": 1.3905,
+ "step": 2436
+ },
+ {
+ "epoch": 2.4376855990747397,
+ "grad_norm": 0.6273905634880066,
+ "learning_rate": 1.6740375338401526e-05,
+ "loss": 1.3156,
+ "step": 2437
+ },
+ {
+ "epoch": 2.4386858804038636,
+ "grad_norm": 0.6700469255447388,
+ "learning_rate": 1.6682360813234444e-05,
+ "loss": 1.1926,
+ "step": 2438
+ },
+ {
+ "epoch": 2.4396861617329875,
+ "grad_norm": 0.6240590214729309,
+ "learning_rate": 1.6624437839084862e-05,
+ "loss": 1.2081,
+ "step": 2439
+ },
+ {
+ "epoch": 2.4406864430621114,
+ "grad_norm": 0.661861777305603,
+ "learning_rate": 1.656660647959962e-05,
+ "loss": 1.0889,
+ "step": 2440
+ },
+ {
+ "epoch": 2.441686724391235,
+ "grad_norm": 0.6484658718109131,
+ "learning_rate": 1.6508866798324986e-05,
+ "loss": 1.2571,
+ "step": 2441
+ },
+ {
+ "epoch": 2.4426870057203587,
+ "grad_norm": 0.6831806302070618,
+ "learning_rate": 1.6451218858706374e-05,
+ "loss": 1.3593,
+ "step": 2442
+ },
+ {
+ "epoch": 2.4436872870494826,
+ "grad_norm": 0.8489123582839966,
+ "learning_rate": 1.6393662724088478e-05,
+ "loss": 1.4949,
+ "step": 2443
+ },
+ {
+ "epoch": 2.4446875683786065,
+ "grad_norm": 0.7886331081390381,
+ "learning_rate": 1.633619845771501e-05,
+ "loss": 1.1703,
+ "step": 2444
+ },
+ {
+ "epoch": 2.4456878497077303,
+ "grad_norm": 0.700215756893158,
+ "learning_rate": 1.627882612272893e-05,
+ "loss": 1.206,
+ "step": 2445
+ },
+ {
+ "epoch": 2.4466881310368542,
+ "grad_norm": 0.9291819930076599,
+ "learning_rate": 1.622154578217199e-05,
+ "loss": 1.2846,
+ "step": 2446
+ },
+ {
+ "epoch": 2.447688412365978,
+ "grad_norm": 0.5982426404953003,
+ "learning_rate": 1.6164357498984893e-05,
+ "loss": 0.9123,
+ "step": 2447
+ },
+ {
+ "epoch": 2.4486886936951016,
+ "grad_norm": 0.6755865812301636,
+ "learning_rate": 1.6107261336007285e-05,
+ "loss": 1.345,
+ "step": 2448
+ },
+ {
+ "epoch": 2.4496889750242254,
+ "grad_norm": 0.7251035571098328,
+ "learning_rate": 1.605025735597746e-05,
+ "loss": 1.2056,
+ "step": 2449
+ },
+ {
+ "epoch": 2.4506892563533493,
+ "grad_norm": 0.6493537425994873,
+ "learning_rate": 1.599334562153254e-05,
+ "loss": 1.1518,
+ "step": 2450
+ },
+ {
+ "epoch": 2.451689537682473,
+ "grad_norm": 0.5705104470252991,
+ "learning_rate": 1.593652619520819e-05,
+ "loss": 1.2496,
+ "step": 2451
+ },
+ {
+ "epoch": 2.452689819011597,
+ "grad_norm": 0.7152146100997925,
+ "learning_rate": 1.587979913943871e-05,
+ "loss": 1.1498,
+ "step": 2452
+ },
+ {
+ "epoch": 2.453690100340721,
+ "grad_norm": 0.5634121894836426,
+ "learning_rate": 1.5823164516556842e-05,
+ "loss": 1.1976,
+ "step": 2453
+ },
+ {
+ "epoch": 2.4546903816698444,
+ "grad_norm": 0.6320593953132629,
+ "learning_rate": 1.5766622388793838e-05,
+ "loss": 1.2146,
+ "step": 2454
+ },
+ {
+ "epoch": 2.4556906629989683,
+ "grad_norm": 0.5591527223587036,
+ "learning_rate": 1.5710172818279222e-05,
+ "loss": 1.1641,
+ "step": 2455
+ },
+ {
+ "epoch": 2.456690944328092,
+ "grad_norm": 0.6221880316734314,
+ "learning_rate": 1.5653815867040923e-05,
+ "loss": 1.3021,
+ "step": 2456
+ },
+ {
+ "epoch": 2.457691225657216,
+ "grad_norm": 0.6908786296844482,
+ "learning_rate": 1.5597551597004966e-05,
+ "loss": 1.2944,
+ "step": 2457
+ },
+ {
+ "epoch": 2.45869150698634,
+ "grad_norm": 0.7013647556304932,
+ "learning_rate": 1.554138006999568e-05,
+ "loss": 1.4917,
+ "step": 2458
+ },
+ {
+ "epoch": 2.459691788315464,
+ "grad_norm": 0.6134495735168457,
+ "learning_rate": 1.5485301347735348e-05,
+ "loss": 1.3146,
+ "step": 2459
+ },
+ {
+ "epoch": 2.4606920696445878,
+ "grad_norm": 0.709322988986969,
+ "learning_rate": 1.5429315491844388e-05,
+ "loss": 1.123,
+ "step": 2460
+ },
+ {
+ "epoch": 2.461692350973711,
+ "grad_norm": 0.5995905995368958,
+ "learning_rate": 1.5373422563841133e-05,
+ "loss": 1.2285,
+ "step": 2461
+ },
+ {
+ "epoch": 2.462692632302835,
+ "grad_norm": 0.5527827143669128,
+ "learning_rate": 1.531762262514177e-05,
+ "loss": 1.0853,
+ "step": 2462
+ },
+ {
+ "epoch": 2.463692913631959,
+ "grad_norm": 0.6579242944717407,
+ "learning_rate": 1.5261915737060384e-05,
+ "loss": 1.2392,
+ "step": 2463
+ },
+ {
+ "epoch": 2.464693194961083,
+ "grad_norm": 0.613092303276062,
+ "learning_rate": 1.5206301960808722e-05,
+ "loss": 1.1744,
+ "step": 2464
+ },
+ {
+ "epoch": 2.4656934762902067,
+ "grad_norm": 0.585175633430481,
+ "learning_rate": 1.5150781357496314e-05,
+ "loss": 1.0613,
+ "step": 2465
+ },
+ {
+ "epoch": 2.4666937576193306,
+ "grad_norm": 0.6694045066833496,
+ "learning_rate": 1.5095353988130235e-05,
+ "loss": 1.3062,
+ "step": 2466
+ },
+ {
+ "epoch": 2.467694038948454,
+ "grad_norm": 0.6092827320098877,
+ "learning_rate": 1.5040019913615123e-05,
+ "loss": 1.2655,
+ "step": 2467
+ },
+ {
+ "epoch": 2.468694320277578,
+ "grad_norm": 0.6726471781730652,
+ "learning_rate": 1.4984779194753151e-05,
+ "loss": 1.4597,
+ "step": 2468
+ },
+ {
+ "epoch": 2.469694601606702,
+ "grad_norm": 0.8569711446762085,
+ "learning_rate": 1.4929631892243856e-05,
+ "loss": 1.5663,
+ "step": 2469
+ },
+ {
+ "epoch": 2.4706948829358257,
+ "grad_norm": 0.8185261487960815,
+ "learning_rate": 1.4874578066684186e-05,
+ "loss": 1.3967,
+ "step": 2470
+ },
+ {
+ "epoch": 2.4716951642649496,
+ "grad_norm": 0.5982089042663574,
+ "learning_rate": 1.4819617778568285e-05,
+ "loss": 1.1807,
+ "step": 2471
+ },
+ {
+ "epoch": 2.4726954455940735,
+ "grad_norm": 0.5836905241012573,
+ "learning_rate": 1.476475108828762e-05,
+ "loss": 1.2433,
+ "step": 2472
+ },
+ {
+ "epoch": 2.4736957269231974,
+ "grad_norm": 0.6865205764770508,
+ "learning_rate": 1.4709978056130713e-05,
+ "loss": 1.2392,
+ "step": 2473
+ },
+ {
+ "epoch": 2.474696008252321,
+ "grad_norm": 0.6308683156967163,
+ "learning_rate": 1.4655298742283252e-05,
+ "loss": 1.1384,
+ "step": 2474
+ },
+ {
+ "epoch": 2.4756962895814447,
+ "grad_norm": 0.5822309255599976,
+ "learning_rate": 1.4600713206827932e-05,
+ "loss": 1.0825,
+ "step": 2475
+ },
+ {
+ "epoch": 2.4766965709105686,
+ "grad_norm": 0.6284547448158264,
+ "learning_rate": 1.454622150974434e-05,
+ "loss": 1.2787,
+ "step": 2476
+ },
+ {
+ "epoch": 2.4776968522396925,
+ "grad_norm": 0.9280424118041992,
+ "learning_rate": 1.4491823710909047e-05,
+ "loss": 1.364,
+ "step": 2477
+ },
+ {
+ "epoch": 2.4786971335688164,
+ "grad_norm": 0.6958044171333313,
+ "learning_rate": 1.4437519870095329e-05,
+ "loss": 1.2987,
+ "step": 2478
+ },
+ {
+ "epoch": 2.47969741489794,
+ "grad_norm": 0.6451518535614014,
+ "learning_rate": 1.4383310046973365e-05,
+ "loss": 0.995,
+ "step": 2479
+ },
+ {
+ "epoch": 2.4806976962270637,
+ "grad_norm": 0.7004462480545044,
+ "learning_rate": 1.4329194301109872e-05,
+ "loss": 1.325,
+ "step": 2480
+ },
+ {
+ "epoch": 2.4816979775561876,
+ "grad_norm": 0.6288760304450989,
+ "learning_rate": 1.427517269196833e-05,
+ "loss": 1.2204,
+ "step": 2481
+ },
+ {
+ "epoch": 2.4826982588853115,
+ "grad_norm": 0.7389028072357178,
+ "learning_rate": 1.4221245278908668e-05,
+ "loss": 1.3713,
+ "step": 2482
+ },
+ {
+ "epoch": 2.4836985402144354,
+ "grad_norm": 0.6103761196136475,
+ "learning_rate": 1.4167412121187406e-05,
+ "loss": 1.2789,
+ "step": 2483
+ },
+ {
+ "epoch": 2.4846988215435593,
+ "grad_norm": 0.7794318795204163,
+ "learning_rate": 1.4113673277957395e-05,
+ "loss": 1.2033,
+ "step": 2484
+ },
+ {
+ "epoch": 2.485699102872683,
+ "grad_norm": 0.6768416166305542,
+ "learning_rate": 1.4060028808267967e-05,
+ "loss": 1.2824,
+ "step": 2485
+ },
+ {
+ "epoch": 2.4866993842018066,
+ "grad_norm": 0.6633111238479614,
+ "learning_rate": 1.4006478771064646e-05,
+ "loss": 1.2352,
+ "step": 2486
+ },
+ {
+ "epoch": 2.4876996655309305,
+ "grad_norm": 0.5679075717926025,
+ "learning_rate": 1.3953023225189243e-05,
+ "loss": 1.1662,
+ "step": 2487
+ },
+ {
+ "epoch": 2.4886999468600544,
+ "grad_norm": 0.682587206363678,
+ "learning_rate": 1.389966222937974e-05,
+ "loss": 1.2081,
+ "step": 2488
+ },
+ {
+ "epoch": 2.4897002281891782,
+ "grad_norm": 0.5690992474555969,
+ "learning_rate": 1.3846395842270232e-05,
+ "loss": 1.1828,
+ "step": 2489
+ },
+ {
+ "epoch": 2.490700509518302,
+ "grad_norm": 0.725383460521698,
+ "learning_rate": 1.3793224122390858e-05,
+ "loss": 1.4199,
+ "step": 2490
+ },
+ {
+ "epoch": 2.491700790847426,
+ "grad_norm": 0.7507576942443848,
+ "learning_rate": 1.374014712816768e-05,
+ "loss": 1.2292,
+ "step": 2491
+ },
+ {
+ "epoch": 2.4927010721765495,
+ "grad_norm": 0.6473737955093384,
+ "learning_rate": 1.3687164917922768e-05,
+ "loss": 1.4711,
+ "step": 2492
+ },
+ {
+ "epoch": 2.4937013535056733,
+ "grad_norm": 0.7573422193527222,
+ "learning_rate": 1.3634277549873953e-05,
+ "loss": 1.4421,
+ "step": 2493
+ },
+ {
+ "epoch": 2.4947016348347972,
+ "grad_norm": 0.6150047779083252,
+ "learning_rate": 1.3581485082134882e-05,
+ "loss": 1.1721,
+ "step": 2494
+ },
+ {
+ "epoch": 2.495701916163921,
+ "grad_norm": 0.6486732959747314,
+ "learning_rate": 1.3528787572714952e-05,
+ "loss": 1.2939,
+ "step": 2495
+ },
+ {
+ "epoch": 2.496702197493045,
+ "grad_norm": 0.632709801197052,
+ "learning_rate": 1.3476185079519177e-05,
+ "loss": 1.4171,
+ "step": 2496
+ },
+ {
+ "epoch": 2.497702478822169,
+ "grad_norm": 0.7160407900810242,
+ "learning_rate": 1.342367766034821e-05,
+ "loss": 1.3601,
+ "step": 2497
+ },
+ {
+ "epoch": 2.498702760151293,
+ "grad_norm": 0.7090329527854919,
+ "learning_rate": 1.3371265372898167e-05,
+ "loss": 1.4255,
+ "step": 2498
+ },
+ {
+ "epoch": 2.499703041480416,
+ "grad_norm": 0.6394259929656982,
+ "learning_rate": 1.3318948274760734e-05,
+ "loss": 1.2454,
+ "step": 2499
+ },
+ {
+ "epoch": 2.50070332280954,
+ "grad_norm": 0.6224787831306458,
+ "learning_rate": 1.326672642342287e-05,
+ "loss": 1.3184,
+ "step": 2500
+ }
+ ],
+ "logging_steps": 1,
+ "max_steps": 2997,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 3,
+ "save_steps": 500,
+ "stateful_callbacks": {
+ "TrainerControl": {
+ "args": {
+ "should_epoch_stop": false,
+ "should_evaluate": false,
+ "should_log": false,
+ "should_save": true,
+ "should_training_stop": false
+ },
+ "attributes": {}
+ }
+ },
+ "total_flos": 2.0238530622357504e+16,
+ "train_batch_size": 2,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/checkpoint-2500/training_args.bin b/checkpoint-2500/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..6831a6d623a8a2b84942bb5584c6aa5bc14eee51
--- /dev/null
+++ b/checkpoint-2500/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5db131d6e82df60077bab037ec35113e1b0836a0bd72bb0a21e3fc0311a527de
+size 5304
diff --git a/checkpoint-2997/config.json b/checkpoint-2997/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..49d4bd1e1961ef7daf9af9a7dbe539789d58d949
--- /dev/null
+++ b/checkpoint-2997/config.json
@@ -0,0 +1,35 @@
+{
+ "_name_or_path": "facebook/nllb-200-distilled-600M",
+ "activation_dropout": 0.0,
+ "activation_function": "relu",
+ "architectures": [
+ "M2M100ForConditionalGeneration"
+ ],
+ "attention_dropout": 0.1,
+ "bos_token_id": 0,
+ "d_model": 1024,
+ "decoder_attention_heads": 16,
+ "decoder_ffn_dim": 4096,
+ "decoder_layerdrop": 0,
+ "decoder_layers": 12,
+ "decoder_start_token_id": 2,
+ "dropout": 0.1,
+ "encoder_attention_heads": 16,
+ "encoder_ffn_dim": 4096,
+ "encoder_layerdrop": 0,
+ "encoder_layers": 12,
+ "eos_token_id": 2,
+ "init_std": 0.02,
+ "is_encoder_decoder": true,
+ "max_length": 200,
+ "max_position_embeddings": 1024,
+ "model_type": "m2m_100",
+ "num_hidden_layers": 12,
+ "pad_token_id": 1,
+ "scale_embedding": true,
+ "tokenizer_class": "NllbTokenizer",
+ "torch_dtype": "float32",
+ "transformers_version": "4.43.1",
+ "use_cache": true,
+ "vocab_size": 256206
+}
diff --git a/checkpoint-2997/generation_config.json b/checkpoint-2997/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..680d3e0504023804deeb427766576194a0f17d47
--- /dev/null
+++ b/checkpoint-2997/generation_config.json
@@ -0,0 +1,9 @@
+{
+ "_from_model_config": true,
+ "bos_token_id": 0,
+ "decoder_start_token_id": 2,
+ "eos_token_id": 2,
+ "max_length": 200,
+ "pad_token_id": 1,
+ "transformers_version": "4.43.1"
+}
diff --git a/checkpoint-2997/model.safetensors b/checkpoint-2997/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..47e7d43a1ceff6aa0a8c9d248c39bcb963f3d9a7
--- /dev/null
+++ b/checkpoint-2997/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d3da8910a135ec4d37ea9c27c53fe9dcca53966e28c20510b4a7bd5b989a7e18
+size 2460354912
diff --git a/checkpoint-2997/optimizer.pt b/checkpoint-2997/optimizer.pt
new file mode 100644
index 0000000000000000000000000000000000000000..d68554369589d0947629e4889bcd819643040f4b
--- /dev/null
+++ b/checkpoint-2997/optimizer.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:33b9166e84abb85fe0a08b26db37aec7b0f57a956dfb281069c18394f421cec7
+size 5125261
diff --git a/checkpoint-2997/rng_state.pth b/checkpoint-2997/rng_state.pth
new file mode 100644
index 0000000000000000000000000000000000000000..934b1a377b78d48fb8ab181779bdabce25aad14d
--- /dev/null
+++ b/checkpoint-2997/rng_state.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6138f1f3d71483e7f92ae679a9b16aff9dabc6109b08e077bffaafb19324f389
+size 14244
diff --git a/checkpoint-2997/scheduler.pt b/checkpoint-2997/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..c8d7a14e843b88d04e82adecafb2059c0889c841
--- /dev/null
+++ b/checkpoint-2997/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:283f56cb64358dcfe445dadd782a570e2d61256c4354ad9314a950651aa88c8d
+size 1064
diff --git a/checkpoint-2997/sentencepiece.bpe.model b/checkpoint-2997/sentencepiece.bpe.model
new file mode 100644
index 0000000000000000000000000000000000000000..dc2262d3e1d375b235eb71c24119c8e73f85d4ad
--- /dev/null
+++ b/checkpoint-2997/sentencepiece.bpe.model
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:14bb8dfb35c0ffdea7bc01e56cea38b9e3d5efcdcb9c251d6b40538e1aab555a
+size 4852054
diff --git a/checkpoint-2997/special_tokens_map.json b/checkpoint-2997/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..770c6f4e25faf27bbc3878b806f2ecfb88c5169e
--- /dev/null
+++ b/checkpoint-2997/special_tokens_map.json
@@ -0,0 +1,255 @@
+{
+ "additional_special_tokens": [
+ "ace_Arab",
+ "ace_Latn",
+ "acm_Arab",
+ "acq_Arab",
+ "aeb_Arab",
+ "afr_Latn",
+ "ajp_Arab",
+ "aka_Latn",
+ "amh_Ethi",
+ "apc_Arab",
+ "arb_Arab",
+ "ars_Arab",
+ "ary_Arab",
+ "arz_Arab",
+ "asm_Beng",
+ "ast_Latn",
+ "awa_Deva",
+ "ayr_Latn",
+ "azb_Arab",
+ "azj_Latn",
+ "bak_Cyrl",
+ "bam_Latn",
+ "ban_Latn",
+ "bel_Cyrl",
+ "bem_Latn",
+ "ben_Beng",
+ "bho_Deva",
+ "bjn_Arab",
+ "bjn_Latn",
+ "bod_Tibt",
+ "bos_Latn",
+ "bug_Latn",
+ "bul_Cyrl",
+ "cat_Latn",
+ "ceb_Latn",
+ "ces_Latn",
+ "cjk_Latn",
+ "ckb_Arab",
+ "crh_Latn",
+ "cym_Latn",
+ "dan_Latn",
+ "deu_Latn",
+ "dik_Latn",
+ "dyu_Latn",
+ "dzo_Tibt",
+ "ell_Grek",
+ "eng_Latn",
+ "epo_Latn",
+ "est_Latn",
+ "eus_Latn",
+ "ewe_Latn",
+ "fao_Latn",
+ "pes_Arab",
+ "fij_Latn",
+ "fin_Latn",
+ "fon_Latn",
+ "fra_Latn",
+ "fur_Latn",
+ "fuv_Latn",
+ "gla_Latn",
+ "gle_Latn",
+ "glg_Latn",
+ "grn_Latn",
+ "guj_Gujr",
+ "hat_Latn",
+ "hau_Latn",
+ "heb_Hebr",
+ "hin_Deva",
+ "hne_Deva",
+ "hrv_Latn",
+ "hun_Latn",
+ "hye_Armn",
+ "ibo_Latn",
+ "ilo_Latn",
+ "ind_Latn",
+ "isl_Latn",
+ "ita_Latn",
+ "jav_Latn",
+ "jpn_Jpan",
+ "kab_Latn",
+ "kac_Latn",
+ "kam_Latn",
+ "kan_Knda",
+ "kas_Arab",
+ "kas_Deva",
+ "kat_Geor",
+ "knc_Arab",
+ "knc_Latn",
+ "kaz_Cyrl",
+ "kbp_Latn",
+ "kea_Latn",
+ "khm_Khmr",
+ "kik_Latn",
+ "kin_Latn",
+ "kir_Cyrl",
+ "kmb_Latn",
+ "kon_Latn",
+ "kor_Hang",
+ "kmr_Latn",
+ "lao_Laoo",
+ "lvs_Latn",
+ "lij_Latn",
+ "lim_Latn",
+ "lin_Latn",
+ "lit_Latn",
+ "lmo_Latn",
+ "ltg_Latn",
+ "ltz_Latn",
+ "lua_Latn",
+ "lug_Latn",
+ "luo_Latn",
+ "lus_Latn",
+ "mag_Deva",
+ "mai_Deva",
+ "mal_Mlym",
+ "mar_Deva",
+ "min_Latn",
+ "mkd_Cyrl",
+ "plt_Latn",
+ "mlt_Latn",
+ "mni_Beng",
+ "khk_Cyrl",
+ "mos_Latn",
+ "mri_Latn",
+ "zsm_Latn",
+ "mya_Mymr",
+ "nld_Latn",
+ "nno_Latn",
+ "nob_Latn",
+ "npi_Deva",
+ "nso_Latn",
+ "nus_Latn",
+ "nya_Latn",
+ "oci_Latn",
+ "gaz_Latn",
+ "ory_Orya",
+ "pag_Latn",
+ "pan_Guru",
+ "pap_Latn",
+ "pol_Latn",
+ "por_Latn",
+ "prs_Arab",
+ "pbt_Arab",
+ "quy_Latn",
+ "ron_Latn",
+ "run_Latn",
+ "rus_Cyrl",
+ "sag_Latn",
+ "san_Deva",
+ "sat_Beng",
+ "scn_Latn",
+ "shn_Mymr",
+ "sin_Sinh",
+ "slk_Latn",
+ "slv_Latn",
+ "smo_Latn",
+ "sna_Latn",
+ "snd_Arab",
+ "som_Latn",
+ "sot_Latn",
+ "spa_Latn",
+ "als_Latn",
+ "srd_Latn",
+ "srp_Cyrl",
+ "ssw_Latn",
+ "sun_Latn",
+ "swe_Latn",
+ "swh_Latn",
+ "szl_Latn",
+ "tam_Taml",
+ "tat_Cyrl",
+ "tel_Telu",
+ "tgk_Cyrl",
+ "tgl_Latn",
+ "tha_Thai",
+ "tir_Ethi",
+ "taq_Latn",
+ "taq_Tfng",
+ "tpi_Latn",
+ "tsn_Latn",
+ "tso_Latn",
+ "tuk_Latn",
+ "tum_Latn",
+ "tur_Latn",
+ "twi_Latn",
+ "tzm_Tfng",
+ "uig_Arab",
+ "ukr_Cyrl",
+ "umb_Latn",
+ "urd_Arab",
+ "uzn_Latn",
+ "vec_Latn",
+ "vie_Latn",
+ "war_Latn",
+ "wol_Latn",
+ "xho_Latn",
+ "ydd_Hebr",
+ "yor_Latn",
+ "yue_Hant",
+ "zho_Hans",
+ "zho_Hant",
+ "zul_Latn"
+ ],
+ "bos_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "cls_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "mask_token": {
+ "content": "",
+ "lstrip": true,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "sep_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/checkpoint-2997/tokenizer.json b/checkpoint-2997/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..98050e98b98364c06d83b3f41864076220cb8408
--- /dev/null
+++ b/checkpoint-2997/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3b39b25b0763a1dd69dec54081fafcf10770d9f2538a3bd975a0c4be6d60a9c2
+size 17331294
diff --git a/checkpoint-2997/tokenizer_config.json b/checkpoint-2997/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f1424d3657c008568198b44be241646482e7e9f2
--- /dev/null
+++ b/checkpoint-2997/tokenizer_config.json
@@ -0,0 +1,1878 @@
+{
+ "added_tokens_decoder": {
+ "0": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "3": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256001": {
+ "content": "ace_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256002": {
+ "content": "ace_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256003": {
+ "content": "acm_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256004": {
+ "content": "acq_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256005": {
+ "content": "aeb_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256006": {
+ "content": "afr_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256007": {
+ "content": "ajp_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256008": {
+ "content": "aka_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256009": {
+ "content": "amh_Ethi",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256010": {
+ "content": "apc_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256011": {
+ "content": "arb_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256012": {
+ "content": "ars_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256013": {
+ "content": "ary_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256014": {
+ "content": "arz_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256015": {
+ "content": "asm_Beng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256016": {
+ "content": "ast_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256017": {
+ "content": "awa_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256018": {
+ "content": "ayr_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256019": {
+ "content": "azb_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256020": {
+ "content": "azj_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256021": {
+ "content": "bak_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256022": {
+ "content": "bam_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256023": {
+ "content": "ban_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256024": {
+ "content": "bel_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256025": {
+ "content": "bem_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256026": {
+ "content": "ben_Beng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256027": {
+ "content": "bho_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256028": {
+ "content": "bjn_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256029": {
+ "content": "bjn_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256030": {
+ "content": "bod_Tibt",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256031": {
+ "content": "bos_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256032": {
+ "content": "bug_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256033": {
+ "content": "bul_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256034": {
+ "content": "cat_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256035": {
+ "content": "ceb_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256036": {
+ "content": "ces_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256037": {
+ "content": "cjk_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256038": {
+ "content": "ckb_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256039": {
+ "content": "crh_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256040": {
+ "content": "cym_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256041": {
+ "content": "dan_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256042": {
+ "content": "deu_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256043": {
+ "content": "dik_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256044": {
+ "content": "dyu_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256045": {
+ "content": "dzo_Tibt",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256046": {
+ "content": "ell_Grek",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256047": {
+ "content": "eng_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256048": {
+ "content": "epo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256049": {
+ "content": "est_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256050": {
+ "content": "eus_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256051": {
+ "content": "ewe_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256052": {
+ "content": "fao_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256053": {
+ "content": "pes_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256054": {
+ "content": "fij_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256055": {
+ "content": "fin_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256056": {
+ "content": "fon_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256057": {
+ "content": "fra_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256058": {
+ "content": "fur_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256059": {
+ "content": "fuv_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256060": {
+ "content": "gla_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256061": {
+ "content": "gle_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256062": {
+ "content": "glg_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256063": {
+ "content": "grn_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256064": {
+ "content": "guj_Gujr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256065": {
+ "content": "hat_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256066": {
+ "content": "hau_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256067": {
+ "content": "heb_Hebr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256068": {
+ "content": "hin_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256069": {
+ "content": "hne_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256070": {
+ "content": "hrv_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256071": {
+ "content": "hun_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256072": {
+ "content": "hye_Armn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256073": {
+ "content": "ibo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256074": {
+ "content": "ilo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256075": {
+ "content": "ind_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256076": {
+ "content": "isl_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256077": {
+ "content": "ita_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256078": {
+ "content": "jav_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256079": {
+ "content": "jpn_Jpan",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256080": {
+ "content": "kab_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256081": {
+ "content": "kac_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256082": {
+ "content": "kam_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256083": {
+ "content": "kan_Knda",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256084": {
+ "content": "kas_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256085": {
+ "content": "kas_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256086": {
+ "content": "kat_Geor",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256087": {
+ "content": "knc_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256088": {
+ "content": "knc_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256089": {
+ "content": "kaz_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256090": {
+ "content": "kbp_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256091": {
+ "content": "kea_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256092": {
+ "content": "khm_Khmr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256093": {
+ "content": "kik_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256094": {
+ "content": "kin_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256095": {
+ "content": "kir_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256096": {
+ "content": "kmb_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256097": {
+ "content": "kon_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256098": {
+ "content": "kor_Hang",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256099": {
+ "content": "kmr_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256100": {
+ "content": "lao_Laoo",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256101": {
+ "content": "lvs_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256102": {
+ "content": "lij_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256103": {
+ "content": "lim_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256104": {
+ "content": "lin_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256105": {
+ "content": "lit_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256106": {
+ "content": "lmo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256107": {
+ "content": "ltg_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256108": {
+ "content": "ltz_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256109": {
+ "content": "lua_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256110": {
+ "content": "lug_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256111": {
+ "content": "luo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256112": {
+ "content": "lus_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256113": {
+ "content": "mag_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256114": {
+ "content": "mai_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256115": {
+ "content": "mal_Mlym",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256116": {
+ "content": "mar_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256117": {
+ "content": "min_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256118": {
+ "content": "mkd_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256119": {
+ "content": "plt_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256120": {
+ "content": "mlt_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256121": {
+ "content": "mni_Beng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256122": {
+ "content": "khk_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256123": {
+ "content": "mos_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256124": {
+ "content": "mri_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256125": {
+ "content": "zsm_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256126": {
+ "content": "mya_Mymr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256127": {
+ "content": "nld_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256128": {
+ "content": "nno_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256129": {
+ "content": "nob_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256130": {
+ "content": "npi_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256131": {
+ "content": "nso_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256132": {
+ "content": "nus_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256133": {
+ "content": "nya_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256134": {
+ "content": "oci_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256135": {
+ "content": "gaz_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256136": {
+ "content": "ory_Orya",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256137": {
+ "content": "pag_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256138": {
+ "content": "pan_Guru",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256139": {
+ "content": "pap_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256140": {
+ "content": "pol_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256141": {
+ "content": "por_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256142": {
+ "content": "prs_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256143": {
+ "content": "pbt_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256144": {
+ "content": "quy_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256145": {
+ "content": "ron_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256146": {
+ "content": "run_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256147": {
+ "content": "rus_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256148": {
+ "content": "sag_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256149": {
+ "content": "san_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256150": {
+ "content": "sat_Beng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256151": {
+ "content": "scn_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256152": {
+ "content": "shn_Mymr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256153": {
+ "content": "sin_Sinh",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256154": {
+ "content": "slk_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256155": {
+ "content": "slv_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256156": {
+ "content": "smo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256157": {
+ "content": "sna_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256158": {
+ "content": "snd_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256159": {
+ "content": "som_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256160": {
+ "content": "sot_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256161": {
+ "content": "spa_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256162": {
+ "content": "als_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256163": {
+ "content": "srd_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256164": {
+ "content": "srp_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256165": {
+ "content": "ssw_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256166": {
+ "content": "sun_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256167": {
+ "content": "swe_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256168": {
+ "content": "swh_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256169": {
+ "content": "szl_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256170": {
+ "content": "tam_Taml",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256171": {
+ "content": "tat_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256172": {
+ "content": "tel_Telu",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256173": {
+ "content": "tgk_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256174": {
+ "content": "tgl_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256175": {
+ "content": "tha_Thai",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256176": {
+ "content": "tir_Ethi",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256177": {
+ "content": "taq_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256178": {
+ "content": "taq_Tfng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256179": {
+ "content": "tpi_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256180": {
+ "content": "tsn_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256181": {
+ "content": "tso_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256182": {
+ "content": "tuk_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256183": {
+ "content": "tum_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256184": {
+ "content": "tur_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256185": {
+ "content": "twi_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256186": {
+ "content": "tzm_Tfng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256187": {
+ "content": "uig_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256188": {
+ "content": "ukr_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256189": {
+ "content": "umb_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256190": {
+ "content": "urd_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256191": {
+ "content": "uzn_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256192": {
+ "content": "vec_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256193": {
+ "content": "vie_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256194": {
+ "content": "war_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256195": {
+ "content": "wol_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256196": {
+ "content": "xho_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256197": {
+ "content": "ydd_Hebr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256198": {
+ "content": "yor_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256199": {
+ "content": "yue_Hant",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256200": {
+ "content": "zho_Hans",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256201": {
+ "content": "zho_Hant",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256202": {
+ "content": "zul_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256203": {
+ "content": "<mask>",
+ "lstrip": true,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "additional_special_tokens": [
+ "ace_Arab",
+ "ace_Latn",
+ "acm_Arab",
+ "acq_Arab",
+ "aeb_Arab",
+ "afr_Latn",
+ "ajp_Arab",
+ "aka_Latn",
+ "amh_Ethi",
+ "apc_Arab",
+ "arb_Arab",
+ "ars_Arab",
+ "ary_Arab",
+ "arz_Arab",
+ "asm_Beng",
+ "ast_Latn",
+ "awa_Deva",
+ "ayr_Latn",
+ "azb_Arab",
+ "azj_Latn",
+ "bak_Cyrl",
+ "bam_Latn",
+ "ban_Latn",
+ "bel_Cyrl",
+ "bem_Latn",
+ "ben_Beng",
+ "bho_Deva",
+ "bjn_Arab",
+ "bjn_Latn",
+ "bod_Tibt",
+ "bos_Latn",
+ "bug_Latn",
+ "bul_Cyrl",
+ "cat_Latn",
+ "ceb_Latn",
+ "ces_Latn",
+ "cjk_Latn",
+ "ckb_Arab",
+ "crh_Latn",
+ "cym_Latn",
+ "dan_Latn",
+ "deu_Latn",
+ "dik_Latn",
+ "dyu_Latn",
+ "dzo_Tibt",
+ "ell_Grek",
+ "eng_Latn",
+ "epo_Latn",
+ "est_Latn",
+ "eus_Latn",
+ "ewe_Latn",
+ "fao_Latn",
+ "pes_Arab",
+ "fij_Latn",
+ "fin_Latn",
+ "fon_Latn",
+ "fra_Latn",
+ "fur_Latn",
+ "fuv_Latn",
+ "gla_Latn",
+ "gle_Latn",
+ "glg_Latn",
+ "grn_Latn",
+ "guj_Gujr",
+ "hat_Latn",
+ "hau_Latn",
+ "heb_Hebr",
+ "hin_Deva",
+ "hne_Deva",
+ "hrv_Latn",
+ "hun_Latn",
+ "hye_Armn",
+ "ibo_Latn",
+ "ilo_Latn",
+ "ind_Latn",
+ "isl_Latn",
+ "ita_Latn",
+ "jav_Latn",
+ "jpn_Jpan",
+ "kab_Latn",
+ "kac_Latn",
+ "kam_Latn",
+ "kan_Knda",
+ "kas_Arab",
+ "kas_Deva",
+ "kat_Geor",
+ "knc_Arab",
+ "knc_Latn",
+ "kaz_Cyrl",
+ "kbp_Latn",
+ "kea_Latn",
+ "khm_Khmr",
+ "kik_Latn",
+ "kin_Latn",
+ "kir_Cyrl",
+ "kmb_Latn",
+ "kon_Latn",
+ "kor_Hang",
+ "kmr_Latn",
+ "lao_Laoo",
+ "lvs_Latn",
+ "lij_Latn",
+ "lim_Latn",
+ "lin_Latn",
+ "lit_Latn",
+ "lmo_Latn",
+ "ltg_Latn",
+ "ltz_Latn",
+ "lua_Latn",
+ "lug_Latn",
+ "luo_Latn",
+ "lus_Latn",
+ "mag_Deva",
+ "mai_Deva",
+ "mal_Mlym",
+ "mar_Deva",
+ "min_Latn",
+ "mkd_Cyrl",
+ "plt_Latn",
+ "mlt_Latn",
+ "mni_Beng",
+ "khk_Cyrl",
+ "mos_Latn",
+ "mri_Latn",
+ "zsm_Latn",
+ "mya_Mymr",
+ "nld_Latn",
+ "nno_Latn",
+ "nob_Latn",
+ "npi_Deva",
+ "nso_Latn",
+ "nus_Latn",
+ "nya_Latn",
+ "oci_Latn",
+ "gaz_Latn",
+ "ory_Orya",
+ "pag_Latn",
+ "pan_Guru",
+ "pap_Latn",
+ "pol_Latn",
+ "por_Latn",
+ "prs_Arab",
+ "pbt_Arab",
+ "quy_Latn",
+ "ron_Latn",
+ "run_Latn",
+ "rus_Cyrl",
+ "sag_Latn",
+ "san_Deva",
+ "sat_Beng",
+ "scn_Latn",
+ "shn_Mymr",
+ "sin_Sinh",
+ "slk_Latn",
+ "slv_Latn",
+ "smo_Latn",
+ "sna_Latn",
+ "snd_Arab",
+ "som_Latn",
+ "sot_Latn",
+ "spa_Latn",
+ "als_Latn",
+ "srd_Latn",
+ "srp_Cyrl",
+ "ssw_Latn",
+ "sun_Latn",
+ "swe_Latn",
+ "swh_Latn",
+ "szl_Latn",
+ "tam_Taml",
+ "tat_Cyrl",
+ "tel_Telu",
+ "tgk_Cyrl",
+ "tgl_Latn",
+ "tha_Thai",
+ "tir_Ethi",
+ "taq_Latn",
+ "taq_Tfng",
+ "tpi_Latn",
+ "tsn_Latn",
+ "tso_Latn",
+ "tuk_Latn",
+ "tum_Latn",
+ "tur_Latn",
+ "twi_Latn",
+ "tzm_Tfng",
+ "uig_Arab",
+ "ukr_Cyrl",
+ "umb_Latn",
+ "urd_Arab",
+ "uzn_Latn",
+ "vec_Latn",
+ "vie_Latn",
+ "war_Latn",
+ "wol_Latn",
+ "xho_Latn",
+ "ydd_Hebr",
+ "yor_Latn",
+ "yue_Hant",
+ "zho_Hans",
+ "zho_Hant",
+ "zul_Latn"
+ ],
+ "bos_token": "<s>",
+ "clean_up_tokenization_spaces": true,
+ "cls_token": "<s>",
+ "eos_token": "</s>",
+ "legacy_behaviour": false,
+ "mask_token": "<mask>",
+ "model_max_length": 1024,
+ "pad_token": "<pad>",
+ "sep_token": "</s>",
+ "sp_model_kwargs": {},
+ "src_lang": "eng_Latn",
+ "tgt_lang": null,
+ "tokenizer_class": "NllbTokenizer",
+ "unk_token": "<unk>"
+}
diff --git a/checkpoint-2997/trainer_state.json b/checkpoint-2997/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..c1b7bee64fe9d7a7977b0e645a1b5ea3e05e9bd8
--- /dev/null
+++ b/checkpoint-2997/trainer_state.json
@@ -0,0 +1,21012 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 2.9978431433840766,
+ "eval_steps": 500,
+ "global_step": 2997,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.001000281329123816,
+ "grad_norm": 7.503077030181885,
+ "learning_rate": 0.0001999999450590425,
+ "loss": 3.6513,
+ "step": 1
+ },
+ {
+ "epoch": 0.002000562658247632,
+ "grad_norm": 3.1191189289093018,
+ "learning_rate": 0.00019999978023623033,
+ "loss": 2.8683,
+ "step": 2
+ },
+ {
+ "epoch": 0.003000843987371448,
+ "grad_norm": 1.9282511472702026,
+ "learning_rate": 0.0001999995055317446,
+ "loss": 2.7882,
+ "step": 3
+ },
+ {
+ "epoch": 0.004001125316495264,
+ "grad_norm": 1.726026177406311,
+ "learning_rate": 0.00019999912094588717,
+ "loss": 2.7716,
+ "step": 4
+ },
+ {
+ "epoch": 0.005001406645619081,
+ "grad_norm": 1.4632996320724487,
+ "learning_rate": 0.00019999862647908064,
+ "loss": 2.6869,
+ "step": 5
+ },
+ {
+ "epoch": 0.006001687974742896,
+ "grad_norm": 1.5544543266296387,
+ "learning_rate": 0.00019999802213186834,
+ "loss": 2.6952,
+ "step": 6
+ },
+ {
+ "epoch": 0.007001969303866712,
+ "grad_norm": 1.5888980627059937,
+ "learning_rate": 0.0001999973079049143,
+ "loss": 2.1237,
+ "step": 7
+ },
+ {
+ "epoch": 0.008002250632990529,
+ "grad_norm": 1.8750641345977783,
+ "learning_rate": 0.00019999648379900338,
+ "loss": 2.3376,
+ "step": 8
+ },
+ {
+ "epoch": 0.009002531962114344,
+ "grad_norm": 1.0540648698806763,
+ "learning_rate": 0.0001999955498150411,
+ "loss": 2.4896,
+ "step": 9
+ },
+ {
+ "epoch": 0.010002813291238161,
+ "grad_norm": 1.0269274711608887,
+ "learning_rate": 0.00019999450595405374,
+ "loss": 2.1365,
+ "step": 10
+ },
+ {
+ "epoch": 0.011003094620361977,
+ "grad_norm": 1.0851730108261108,
+ "learning_rate": 0.0001999933522171883,
+ "loss": 2.235,
+ "step": 11
+ },
+ {
+ "epoch": 0.012003375949485792,
+ "grad_norm": 0.927042543888092,
+ "learning_rate": 0.00019999208860571255,
+ "loss": 2.2438,
+ "step": 12
+ },
+ {
+ "epoch": 0.01300365727860961,
+ "grad_norm": 1.3729208707809448,
+ "learning_rate": 0.00019999071512101496,
+ "loss": 2.0845,
+ "step": 13
+ },
+ {
+ "epoch": 0.014003938607733425,
+ "grad_norm": 1.1325910091400146,
+ "learning_rate": 0.00019998923176460474,
+ "loss": 2.0668,
+ "step": 14
+ },
+ {
+ "epoch": 0.01500421993685724,
+ "grad_norm": 0.9290457367897034,
+ "learning_rate": 0.00019998763853811184,
+ "loss": 2.0227,
+ "step": 15
+ },
+ {
+ "epoch": 0.016004501265981057,
+ "grad_norm": 0.942140519618988,
+ "learning_rate": 0.00019998593544328692,
+ "loss": 2.1598,
+ "step": 16
+ },
+ {
+ "epoch": 0.017004782595104875,
+ "grad_norm": 1.096635103225708,
+ "learning_rate": 0.00019998412248200138,
+ "loss": 2.1897,
+ "step": 17
+ },
+ {
+ "epoch": 0.01800506392422869,
+ "grad_norm": 1.1107186079025269,
+ "learning_rate": 0.00019998219965624734,
+ "loss": 2.0546,
+ "step": 18
+ },
+ {
+ "epoch": 0.019005345253352506,
+ "grad_norm": 0.9696593880653381,
+ "learning_rate": 0.0001999801669681376,
+ "loss": 2.0317,
+ "step": 19
+ },
+ {
+ "epoch": 0.020005626582476323,
+ "grad_norm": 0.9394300580024719,
+ "learning_rate": 0.00019997802441990573,
+ "loss": 2.2883,
+ "step": 20
+ },
+ {
+ "epoch": 0.021005907911600136,
+ "grad_norm": 1.08865225315094,
+ "learning_rate": 0.00019997577201390606,
+ "loss": 1.9838,
+ "step": 21
+ },
+ {
+ "epoch": 0.022006189240723954,
+ "grad_norm": 1.0712405443191528,
+ "learning_rate": 0.00019997340975261353,
+ "loss": 2.1177,
+ "step": 22
+ },
+ {
+ "epoch": 0.02300647056984777,
+ "grad_norm": 1.3190314769744873,
+ "learning_rate": 0.00019997093763862383,
+ "loss": 1.9755,
+ "step": 23
+ },
+ {
+ "epoch": 0.024006751898971584,
+ "grad_norm": 1.0659812688827515,
+ "learning_rate": 0.0001999683556746534,
+ "loss": 1.9829,
+ "step": 24
+ },
+ {
+ "epoch": 0.0250070332280954,
+ "grad_norm": 1.1824345588684082,
+ "learning_rate": 0.0001999656638635393,
+ "loss": 2.4219,
+ "step": 25
+ },
+ {
+ "epoch": 0.02600731455721922,
+ "grad_norm": 1.3446214199066162,
+ "learning_rate": 0.0001999628622082394,
+ "loss": 1.9644,
+ "step": 26
+ },
+ {
+ "epoch": 0.027007595886343033,
+ "grad_norm": 1.2527475357055664,
+ "learning_rate": 0.0001999599507118322,
+ "loss": 2.1889,
+ "step": 27
+ },
+ {
+ "epoch": 0.02800787721546685,
+ "grad_norm": 1.4738999605178833,
+ "learning_rate": 0.00019995692937751683,
+ "loss": 2.1949,
+ "step": 28
+ },
+ {
+ "epoch": 0.029008158544590667,
+ "grad_norm": 1.0533576011657715,
+ "learning_rate": 0.0001999537982086133,
+ "loss": 2.1034,
+ "step": 29
+ },
+ {
+ "epoch": 0.03000843987371448,
+ "grad_norm": 1.0343223810195923,
+ "learning_rate": 0.00019995055720856218,
+ "loss": 1.9561,
+ "step": 30
+ },
+ {
+ "epoch": 0.031008721202838298,
+ "grad_norm": 1.1149976253509521,
+ "learning_rate": 0.00019994720638092468,
+ "loss": 2.0981,
+ "step": 31
+ },
+ {
+ "epoch": 0.032009002531962115,
+ "grad_norm": 1.197178840637207,
+ "learning_rate": 0.00019994374572938277,
+ "loss": 2.1587,
+ "step": 32
+ },
+ {
+ "epoch": 0.03300928386108593,
+ "grad_norm": 0.9382303953170776,
+ "learning_rate": 0.00019994017525773913,
+ "loss": 1.869,
+ "step": 33
+ },
+ {
+ "epoch": 0.03400956519020975,
+ "grad_norm": 1.0526461601257324,
+ "learning_rate": 0.00019993649496991705,
+ "loss": 1.9045,
+ "step": 34
+ },
+ {
+ "epoch": 0.03500984651933356,
+ "grad_norm": 0.8510498404502869,
+ "learning_rate": 0.00019993270486996046,
+ "loss": 2.1005,
+ "step": 35
+ },
+ {
+ "epoch": 0.03601012784845738,
+ "grad_norm": 0.9990401268005371,
+ "learning_rate": 0.000199928804962034,
+ "loss": 1.8569,
+ "step": 36
+ },
+ {
+ "epoch": 0.037010409177581194,
+ "grad_norm": 0.9243854284286499,
+ "learning_rate": 0.00019992479525042303,
+ "loss": 1.9666,
+ "step": 37
+ },
+ {
+ "epoch": 0.03801069050670501,
+ "grad_norm": 0.7774227261543274,
+ "learning_rate": 0.00019992067573953342,
+ "loss": 2.0376,
+ "step": 38
+ },
+ {
+ "epoch": 0.03901097183582883,
+ "grad_norm": 0.8114833235740662,
+ "learning_rate": 0.0001999164464338918,
+ "loss": 2.1608,
+ "step": 39
+ },
+ {
+ "epoch": 0.040011253164952645,
+ "grad_norm": 0.8716320395469666,
+ "learning_rate": 0.0001999121073381454,
+ "loss": 2.0743,
+ "step": 40
+ },
+ {
+ "epoch": 0.041011534494076456,
+ "grad_norm": 0.9571239948272705,
+ "learning_rate": 0.0001999076584570621,
+ "loss": 2.0128,
+ "step": 41
+ },
+ {
+ "epoch": 0.04201181582320027,
+ "grad_norm": 1.038691520690918,
+ "learning_rate": 0.00019990309979553045,
+ "loss": 1.976,
+ "step": 42
+ },
+ {
+ "epoch": 0.04301209715232409,
+ "grad_norm": 1.0576292276382446,
+ "learning_rate": 0.00019989843135855958,
+ "loss": 1.94,
+ "step": 43
+ },
+ {
+ "epoch": 0.04401237848144791,
+ "grad_norm": 1.0991204977035522,
+ "learning_rate": 0.00019989365315127922,
+ "loss": 1.9397,
+ "step": 44
+ },
+ {
+ "epoch": 0.045012659810571724,
+ "grad_norm": 0.9268686175346375,
+ "learning_rate": 0.0001998887651789398,
+ "loss": 1.9305,
+ "step": 45
+ },
+ {
+ "epoch": 0.04601294113969554,
+ "grad_norm": 0.8459104299545288,
+ "learning_rate": 0.0001998837674469123,
+ "loss": 1.7941,
+ "step": 46
+ },
+ {
+ "epoch": 0.04701322246881936,
+ "grad_norm": 0.9260527491569519,
+ "learning_rate": 0.00019987865996068833,
+ "loss": 1.8843,
+ "step": 47
+ },
+ {
+ "epoch": 0.04801350379794317,
+ "grad_norm": 0.8370497226715088,
+ "learning_rate": 0.00019987344272588006,
+ "loss": 1.8779,
+ "step": 48
+ },
+ {
+ "epoch": 0.049013785127066986,
+ "grad_norm": 0.9228008389472961,
+ "learning_rate": 0.00019986811574822033,
+ "loss": 2.1713,
+ "step": 49
+ },
+ {
+ "epoch": 0.0500140664561908,
+ "grad_norm": 1.013746738433838,
+ "learning_rate": 0.00019986267903356254,
+ "loss": 2.1443,
+ "step": 50
+ },
+ {
+ "epoch": 0.05101434778531462,
+ "grad_norm": 1.0155737400054932,
+ "learning_rate": 0.0001998571325878806,
+ "loss": 1.9679,
+ "step": 51
+ },
+ {
+ "epoch": 0.05201462911443844,
+ "grad_norm": 0.9591345191001892,
+ "learning_rate": 0.0001998514764172691,
+ "loss": 2.0611,
+ "step": 52
+ },
+ {
+ "epoch": 0.053014910443562255,
+ "grad_norm": 0.9030050039291382,
+ "learning_rate": 0.00019984571052794313,
+ "loss": 1.9698,
+ "step": 53
+ },
+ {
+ "epoch": 0.054015191772686065,
+ "grad_norm": 0.7697799205780029,
+ "learning_rate": 0.00019983983492623833,
+ "loss": 2.0609,
+ "step": 54
+ },
+ {
+ "epoch": 0.05501547310180988,
+ "grad_norm": 0.8806005716323853,
+ "learning_rate": 0.00019983384961861096,
+ "loss": 1.9756,
+ "step": 55
+ },
+ {
+ "epoch": 0.0560157544309337,
+ "grad_norm": 0.9424449801445007,
+ "learning_rate": 0.0001998277546116378,
+ "loss": 2.0913,
+ "step": 56
+ },
+ {
+ "epoch": 0.05701603576005752,
+ "grad_norm": 1.139495849609375,
+ "learning_rate": 0.00019982154991201608,
+ "loss": 2.2524,
+ "step": 57
+ },
+ {
+ "epoch": 0.058016317089181334,
+ "grad_norm": 1.094347357749939,
+ "learning_rate": 0.00019981523552656377,
+ "loss": 1.8501,
+ "step": 58
+ },
+ {
+ "epoch": 0.05901659841830515,
+ "grad_norm": 1.1519278287887573,
+ "learning_rate": 0.00019980881146221914,
+ "loss": 1.9866,
+ "step": 59
+ },
+ {
+ "epoch": 0.06001687974742896,
+ "grad_norm": 1.2018250226974487,
+ "learning_rate": 0.00019980227772604112,
+ "loss": 1.8226,
+ "step": 60
+ },
+ {
+ "epoch": 0.06101716107655278,
+ "grad_norm": 0.9565753936767578,
+ "learning_rate": 0.0001997956343252091,
+ "loss": 1.8434,
+ "step": 61
+ },
+ {
+ "epoch": 0.062017442405676595,
+ "grad_norm": 1.0832768678665161,
+ "learning_rate": 0.00019978888126702296,
+ "loss": 2.1271,
+ "step": 62
+ },
+ {
+ "epoch": 0.06301772373480041,
+ "grad_norm": 0.8973837494850159,
+ "learning_rate": 0.00019978201855890308,
+ "loss": 1.8331,
+ "step": 63
+ },
+ {
+ "epoch": 0.06401800506392423,
+ "grad_norm": 0.8754604458808899,
+ "learning_rate": 0.00019977504620839035,
+ "loss": 2.1379,
+ "step": 64
+ },
+ {
+ "epoch": 0.06501828639304805,
+ "grad_norm": 0.8244839310646057,
+ "learning_rate": 0.00019976796422314615,
+ "loss": 1.8431,
+ "step": 65
+ },
+ {
+ "epoch": 0.06601856772217186,
+ "grad_norm": 0.8213551044464111,
+ "learning_rate": 0.00019976077261095226,
+ "loss": 1.9155,
+ "step": 66
+ },
+ {
+ "epoch": 0.06701884905129568,
+ "grad_norm": 0.9140985608100891,
+ "learning_rate": 0.00019975347137971098,
+ "loss": 2.0651,
+ "step": 67
+ },
+ {
+ "epoch": 0.0680191303804195,
+ "grad_norm": 0.8518921732902527,
+ "learning_rate": 0.00019974606053744503,
+ "loss": 1.8197,
+ "step": 68
+ },
+ {
+ "epoch": 0.06901941170954332,
+ "grad_norm": 0.8397145867347717,
+ "learning_rate": 0.00019973854009229763,
+ "loss": 1.8621,
+ "step": 69
+ },
+ {
+ "epoch": 0.07001969303866712,
+ "grad_norm": 0.8727964162826538,
+ "learning_rate": 0.00019973091005253232,
+ "loss": 1.762,
+ "step": 70
+ },
+ {
+ "epoch": 0.07101997436779094,
+ "grad_norm": 0.9284623265266418,
+ "learning_rate": 0.0001997231704265332,
+ "loss": 1.8675,
+ "step": 71
+ },
+ {
+ "epoch": 0.07202025569691475,
+ "grad_norm": 0.8280015587806702,
+ "learning_rate": 0.00019971532122280464,
+ "loss": 1.931,
+ "step": 72
+ },
+ {
+ "epoch": 0.07302053702603857,
+ "grad_norm": 0.7591394186019897,
+ "learning_rate": 0.0001997073624499716,
+ "loss": 1.8485,
+ "step": 73
+ },
+ {
+ "epoch": 0.07402081835516239,
+ "grad_norm": 0.975128710269928,
+ "learning_rate": 0.0001996992941167792,
+ "loss": 2.0784,
+ "step": 74
+ },
+ {
+ "epoch": 0.0750210996842862,
+ "grad_norm": 0.8034948110580444,
+ "learning_rate": 0.00019969111623209323,
+ "loss": 1.9849,
+ "step": 75
+ },
+ {
+ "epoch": 0.07602138101341002,
+ "grad_norm": 0.8540483713150024,
+ "learning_rate": 0.00019968282880489957,
+ "loss": 1.7832,
+ "step": 76
+ },
+ {
+ "epoch": 0.07702166234253384,
+ "grad_norm": 0.8181695342063904,
+ "learning_rate": 0.00019967443184430467,
+ "loss": 1.944,
+ "step": 77
+ },
+ {
+ "epoch": 0.07802194367165766,
+ "grad_norm": 0.8446747064590454,
+ "learning_rate": 0.0001996659253595353,
+ "loss": 1.8508,
+ "step": 78
+ },
+ {
+ "epoch": 0.07902222500078147,
+ "grad_norm": 0.8280364871025085,
+ "learning_rate": 0.0001996573093599385,
+ "loss": 1.843,
+ "step": 79
+ },
+ {
+ "epoch": 0.08002250632990529,
+ "grad_norm": 0.8016006350517273,
+ "learning_rate": 0.00019964858385498172,
+ "loss": 1.9368,
+ "step": 80
+ },
+ {
+ "epoch": 0.08102278765902911,
+ "grad_norm": 0.8450536131858826,
+ "learning_rate": 0.00019963974885425266,
+ "loss": 1.9736,
+ "step": 81
+ },
+ {
+ "epoch": 0.08202306898815291,
+ "grad_norm": 0.9172171950340271,
+ "learning_rate": 0.00019963080436745945,
+ "loss": 1.9382,
+ "step": 82
+ },
+ {
+ "epoch": 0.08302335031727673,
+ "grad_norm": 0.8581916689872742,
+ "learning_rate": 0.00019962175040443044,
+ "loss": 2.224,
+ "step": 83
+ },
+ {
+ "epoch": 0.08402363164640055,
+ "grad_norm": 0.9350367188453674,
+ "learning_rate": 0.0001996125869751143,
+ "loss": 2.0519,
+ "step": 84
+ },
+ {
+ "epoch": 0.08502391297552436,
+ "grad_norm": 0.9276247620582581,
+ "learning_rate": 0.00019960331408957997,
+ "loss": 1.9657,
+ "step": 85
+ },
+ {
+ "epoch": 0.08602419430464818,
+ "grad_norm": 0.871574342250824,
+ "learning_rate": 0.00019959393175801671,
+ "loss": 1.9399,
+ "step": 86
+ },
+ {
+ "epoch": 0.087024475633772,
+ "grad_norm": 1.0662888288497925,
+ "learning_rate": 0.00019958443999073397,
+ "loss": 1.9089,
+ "step": 87
+ },
+ {
+ "epoch": 0.08802475696289581,
+ "grad_norm": 0.8258713483810425,
+ "learning_rate": 0.00019957483879816151,
+ "loss": 1.839,
+ "step": 88
+ },
+ {
+ "epoch": 0.08902503829201963,
+ "grad_norm": 0.8154664039611816,
+ "learning_rate": 0.00019956512819084928,
+ "loss": 1.8409,
+ "step": 89
+ },
+ {
+ "epoch": 0.09002531962114345,
+ "grad_norm": 0.8584638833999634,
+ "learning_rate": 0.00019955530817946748,
+ "loss": 1.9521,
+ "step": 90
+ },
+ {
+ "epoch": 0.09102560095026727,
+ "grad_norm": 0.7917523384094238,
+ "learning_rate": 0.00019954537877480655,
+ "loss": 1.9495,
+ "step": 91
+ },
+ {
+ "epoch": 0.09202588227939108,
+ "grad_norm": 1.0129039287567139,
+ "learning_rate": 0.00019953533998777706,
+ "loss": 1.949,
+ "step": 92
+ },
+ {
+ "epoch": 0.0930261636085149,
+ "grad_norm": 0.8677986264228821,
+ "learning_rate": 0.00019952519182940993,
+ "loss": 1.7875,
+ "step": 93
+ },
+ {
+ "epoch": 0.09402644493763872,
+ "grad_norm": 0.8848614692687988,
+ "learning_rate": 0.00019951493431085603,
+ "loss": 2.0675,
+ "step": 94
+ },
+ {
+ "epoch": 0.09502672626676252,
+ "grad_norm": 0.9936463832855225,
+ "learning_rate": 0.00019950456744338658,
+ "loss": 1.6761,
+ "step": 95
+ },
+ {
+ "epoch": 0.09602700759588634,
+ "grad_norm": 1.0520148277282715,
+ "learning_rate": 0.00019949409123839288,
+ "loss": 2.081,
+ "step": 96
+ },
+ {
+ "epoch": 0.09702728892501016,
+ "grad_norm": 0.8061773180961609,
+ "learning_rate": 0.00019948350570738642,
+ "loss": 1.7281,
+ "step": 97
+ },
+ {
+ "epoch": 0.09802757025413397,
+ "grad_norm": 0.7642756104469299,
+ "learning_rate": 0.0001994728108619987,
+ "loss": 2.0032,
+ "step": 98
+ },
+ {
+ "epoch": 0.09902785158325779,
+ "grad_norm": 0.8541550040245056,
+ "learning_rate": 0.0001994620067139815,
+ "loss": 2.1136,
+ "step": 99
+ },
+ {
+ "epoch": 0.1000281329123816,
+ "grad_norm": 0.7868679761886597,
+ "learning_rate": 0.00019945109327520658,
+ "loss": 1.8695,
+ "step": 100
+ },
+ {
+ "epoch": 0.10102841424150542,
+ "grad_norm": 0.8776901364326477,
+ "learning_rate": 0.00019944007055766586,
+ "loss": 1.9786,
+ "step": 101
+ },
+ {
+ "epoch": 0.10202869557062924,
+ "grad_norm": 0.9013833999633789,
+ "learning_rate": 0.00019942893857347128,
+ "loss": 2.1466,
+ "step": 102
+ },
+ {
+ "epoch": 0.10302897689975306,
+ "grad_norm": 0.957558274269104,
+ "learning_rate": 0.00019941769733485494,
+ "loss": 2.0473,
+ "step": 103
+ },
+ {
+ "epoch": 0.10402925822887688,
+ "grad_norm": 0.8921108841896057,
+ "learning_rate": 0.00019940634685416888,
+ "loss": 1.7882,
+ "step": 104
+ },
+ {
+ "epoch": 0.10502953955800069,
+ "grad_norm": 0.896019697189331,
+ "learning_rate": 0.00019939488714388524,
+ "loss": 1.8811,
+ "step": 105
+ },
+ {
+ "epoch": 0.10602982088712451,
+ "grad_norm": 0.8792067766189575,
+ "learning_rate": 0.00019938331821659614,
+ "loss": 1.8624,
+ "step": 106
+ },
+ {
+ "epoch": 0.10703010221624833,
+ "grad_norm": 0.8739930391311646,
+ "learning_rate": 0.0001993716400850138,
+ "loss": 1.8105,
+ "step": 107
+ },
+ {
+ "epoch": 0.10803038354537213,
+ "grad_norm": 0.7678424715995789,
+ "learning_rate": 0.0001993598527619703,
+ "loss": 1.8772,
+ "step": 108
+ },
+ {
+ "epoch": 0.10903066487449595,
+ "grad_norm": 0.8718745112419128,
+ "learning_rate": 0.00019934795626041783,
+ "loss": 1.8236,
+ "step": 109
+ },
+ {
+ "epoch": 0.11003094620361976,
+ "grad_norm": 0.8467247486114502,
+ "learning_rate": 0.0001993359505934285,
+ "loss": 1.8188,
+ "step": 110
+ },
+ {
+ "epoch": 0.11103122753274358,
+ "grad_norm": 0.8685783743858337,
+ "learning_rate": 0.00019932383577419432,
+ "loss": 2.0775,
+ "step": 111
+ },
+ {
+ "epoch": 0.1120315088618674,
+ "grad_norm": 0.7799698710441589,
+ "learning_rate": 0.0001993116118160273,
+ "loss": 1.6489,
+ "step": 112
+ },
+ {
+ "epoch": 0.11303179019099122,
+ "grad_norm": 0.7900094389915466,
+ "learning_rate": 0.00019929927873235938,
+ "loss": 1.8332,
+ "step": 113
+ },
+ {
+ "epoch": 0.11403207152011503,
+ "grad_norm": 0.9433258771896362,
+ "learning_rate": 0.00019928683653674237,
+ "loss": 1.9331,
+ "step": 114
+ },
+ {
+ "epoch": 0.11503235284923885,
+ "grad_norm": 0.8861056566238403,
+ "learning_rate": 0.00019927428524284805,
+ "loss": 1.9135,
+ "step": 115
+ },
+ {
+ "epoch": 0.11603263417836267,
+ "grad_norm": 0.8566756844520569,
+ "learning_rate": 0.00019926162486446792,
+ "loss": 1.9874,
+ "step": 116
+ },
+ {
+ "epoch": 0.11703291550748648,
+ "grad_norm": 0.6897929310798645,
+ "learning_rate": 0.0001992488554155135,
+ "loss": 1.946,
+ "step": 117
+ },
+ {
+ "epoch": 0.1180331968366103,
+ "grad_norm": 0.7807729244232178,
+ "learning_rate": 0.00019923597691001615,
+ "loss": 1.8127,
+ "step": 118
+ },
+ {
+ "epoch": 0.11903347816573412,
+ "grad_norm": 0.7572523355484009,
+ "learning_rate": 0.0001992229893621269,
+ "loss": 1.7768,
+ "step": 119
+ },
+ {
+ "epoch": 0.12003375949485792,
+ "grad_norm": 0.7393172979354858,
+ "learning_rate": 0.00019920989278611687,
+ "loss": 1.894,
+ "step": 120
+ },
+ {
+ "epoch": 0.12103404082398174,
+ "grad_norm": 0.866576611995697,
+ "learning_rate": 0.0001991966871963767,
+ "loss": 1.9285,
+ "step": 121
+ },
+ {
+ "epoch": 0.12203432215310556,
+ "grad_norm": 0.7326533794403076,
+ "learning_rate": 0.000199183372607417,
+ "loss": 1.9309,
+ "step": 122
+ },
+ {
+ "epoch": 0.12303460348222937,
+ "grad_norm": 0.7655537724494934,
+ "learning_rate": 0.0001991699490338681,
+ "loss": 2.1145,
+ "step": 123
+ },
+ {
+ "epoch": 0.12403488481135319,
+ "grad_norm": 0.9714633226394653,
+ "learning_rate": 0.00019915641649048005,
+ "loss": 2.0341,
+ "step": 124
+ },
+ {
+ "epoch": 0.12503516614047702,
+ "grad_norm": 0.8542420864105225,
+ "learning_rate": 0.0001991427749921227,
+ "loss": 2.1426,
+ "step": 125
+ },
+ {
+ "epoch": 0.12603544746960083,
+ "grad_norm": 0.8286274671554565,
+ "learning_rate": 0.00019912902455378556,
+ "loss": 1.8452,
+ "step": 126
+ },
+ {
+ "epoch": 0.12703572879872463,
+ "grad_norm": 0.8823768496513367,
+ "learning_rate": 0.00019911516519057788,
+ "loss": 1.8651,
+ "step": 127
+ },
+ {
+ "epoch": 0.12803601012784846,
+ "grad_norm": 0.7301567196846008,
+ "learning_rate": 0.00019910119691772863,
+ "loss": 1.7776,
+ "step": 128
+ },
+ {
+ "epoch": 0.12903629145697226,
+ "grad_norm": 0.8402552604675293,
+ "learning_rate": 0.00019908711975058637,
+ "loss": 1.8617,
+ "step": 129
+ },
+ {
+ "epoch": 0.1300365727860961,
+ "grad_norm": 0.814500093460083,
+ "learning_rate": 0.0001990729337046194,
+ "loss": 1.9156,
+ "step": 130
+ },
+ {
+ "epoch": 0.1310368541152199,
+ "grad_norm": 0.8262699246406555,
+ "learning_rate": 0.0001990586387954156,
+ "loss": 1.8659,
+ "step": 131
+ },
+ {
+ "epoch": 0.13203713544434373,
+ "grad_norm": 0.8846324682235718,
+ "learning_rate": 0.00019904423503868247,
+ "loss": 2.043,
+ "step": 132
+ },
+ {
+ "epoch": 0.13303741677346753,
+ "grad_norm": 0.8757227659225464,
+ "learning_rate": 0.00019902972245024715,
+ "loss": 1.9217,
+ "step": 133
+ },
+ {
+ "epoch": 0.13403769810259136,
+ "grad_norm": 0.8476879596710205,
+ "learning_rate": 0.00019901510104605637,
+ "loss": 1.8892,
+ "step": 134
+ },
+ {
+ "epoch": 0.13503797943171517,
+ "grad_norm": 0.7707583904266357,
+ "learning_rate": 0.00019900037084217637,
+ "loss": 1.787,
+ "step": 135
+ },
+ {
+ "epoch": 0.136038260760839,
+ "grad_norm": 0.7389562129974365,
+ "learning_rate": 0.00019898553185479303,
+ "loss": 1.5854,
+ "step": 136
+ },
+ {
+ "epoch": 0.1370385420899628,
+ "grad_norm": 0.7331375479698181,
+ "learning_rate": 0.00019897058410021167,
+ "loss": 1.997,
+ "step": 137
+ },
+ {
+ "epoch": 0.13803882341908663,
+ "grad_norm": 0.7219388484954834,
+ "learning_rate": 0.00019895552759485722,
+ "loss": 1.8337,
+ "step": 138
+ },
+ {
+ "epoch": 0.13903910474821043,
+ "grad_norm": 0.8535702228546143,
+ "learning_rate": 0.00019894036235527395,
+ "loss": 1.7818,
+ "step": 139
+ },
+ {
+ "epoch": 0.14003938607733424,
+ "grad_norm": 0.7627841830253601,
+ "learning_rate": 0.00019892508839812584,
+ "loss": 1.8173,
+ "step": 140
+ },
+ {
+ "epoch": 0.14103966740645807,
+ "grad_norm": 0.8397619724273682,
+ "learning_rate": 0.00019890970574019617,
+ "loss": 1.8735,
+ "step": 141
+ },
+ {
+ "epoch": 0.14203994873558187,
+ "grad_norm": 0.8093482851982117,
+ "learning_rate": 0.00019889421439838763,
+ "loss": 1.9918,
+ "step": 142
+ },
+ {
+ "epoch": 0.1430402300647057,
+ "grad_norm": 0.8853684067726135,
+ "learning_rate": 0.00019887861438972246,
+ "loss": 1.5825,
+ "step": 143
+ },
+ {
+ "epoch": 0.1440405113938295,
+ "grad_norm": 0.7413788437843323,
+ "learning_rate": 0.00019886290573134228,
+ "loss": 1.9068,
+ "step": 144
+ },
+ {
+ "epoch": 0.14504079272295334,
+ "grad_norm": 0.7924477458000183,
+ "learning_rate": 0.000198847088440508,
+ "loss": 1.8582,
+ "step": 145
+ },
+ {
+ "epoch": 0.14604107405207714,
+ "grad_norm": 0.8679131865501404,
+ "learning_rate": 0.0001988311625346,
+ "loss": 1.7104,
+ "step": 146
+ },
+ {
+ "epoch": 0.14704135538120097,
+ "grad_norm": 0.7480150461196899,
+ "learning_rate": 0.00019881512803111796,
+ "loss": 1.7288,
+ "step": 147
+ },
+ {
+ "epoch": 0.14804163671032478,
+ "grad_norm": 0.8382390737533569,
+ "learning_rate": 0.00019879898494768093,
+ "loss": 1.8004,
+ "step": 148
+ },
+ {
+ "epoch": 0.1490419180394486,
+ "grad_norm": 0.7360037565231323,
+ "learning_rate": 0.00019878273330202717,
+ "loss": 1.85,
+ "step": 149
+ },
+ {
+ "epoch": 0.1500421993685724,
+ "grad_norm": 0.9644019603729248,
+ "learning_rate": 0.00019876637311201433,
+ "loss": 2.065,
+ "step": 150
+ },
+ {
+ "epoch": 0.15104248069769624,
+ "grad_norm": 0.8116248250007629,
+ "learning_rate": 0.00019874990439561934,
+ "loss": 1.702,
+ "step": 151
+ },
+ {
+ "epoch": 0.15204276202682004,
+ "grad_norm": 0.9301722645759583,
+ "learning_rate": 0.0001987333271709383,
+ "loss": 1.8089,
+ "step": 152
+ },
+ {
+ "epoch": 0.15304304335594385,
+ "grad_norm": 0.7991555333137512,
+ "learning_rate": 0.00019871664145618657,
+ "loss": 1.8227,
+ "step": 153
+ },
+ {
+ "epoch": 0.15404332468506768,
+ "grad_norm": 0.8676092028617859,
+ "learning_rate": 0.00019869984726969878,
+ "loss": 1.7253,
+ "step": 154
+ },
+ {
+ "epoch": 0.15504360601419148,
+ "grad_norm": 0.8022972941398621,
+ "learning_rate": 0.00019868294462992866,
+ "loss": 1.8766,
+ "step": 155
+ },
+ {
+ "epoch": 0.1560438873433153,
+ "grad_norm": 1.128886103630066,
+ "learning_rate": 0.00019866593355544922,
+ "loss": 2.0197,
+ "step": 156
+ },
+ {
+ "epoch": 0.15704416867243912,
+ "grad_norm": 0.7420483827590942,
+ "learning_rate": 0.00019864881406495246,
+ "loss": 1.8825,
+ "step": 157
+ },
+ {
+ "epoch": 0.15804445000156295,
+ "grad_norm": 0.7797536849975586,
+ "learning_rate": 0.00019863158617724967,
+ "loss": 1.8892,
+ "step": 158
+ },
+ {
+ "epoch": 0.15904473133068675,
+ "grad_norm": 0.6859965324401855,
+ "learning_rate": 0.00019861424991127115,
+ "loss": 1.8424,
+ "step": 159
+ },
+ {
+ "epoch": 0.16004501265981058,
+ "grad_norm": 0.8115108609199524,
+ "learning_rate": 0.00019859680528606637,
+ "loss": 1.8394,
+ "step": 160
+ },
+ {
+ "epoch": 0.16104529398893438,
+ "grad_norm": 0.9756322503089905,
+ "learning_rate": 0.00019857925232080373,
+ "loss": 1.726,
+ "step": 161
+ },
+ {
+ "epoch": 0.16204557531805822,
+ "grad_norm": 0.8894350528717041,
+ "learning_rate": 0.00019856159103477086,
+ "loss": 1.8893,
+ "step": 162
+ },
+ {
+ "epoch": 0.16304585664718202,
+ "grad_norm": 0.8075819611549377,
+ "learning_rate": 0.00019854382144737426,
+ "loss": 1.6596,
+ "step": 163
+ },
+ {
+ "epoch": 0.16404613797630582,
+ "grad_norm": 0.8861923813819885,
+ "learning_rate": 0.00019852594357813952,
+ "loss": 1.9352,
+ "step": 164
+ },
+ {
+ "epoch": 0.16504641930542965,
+ "grad_norm": 0.8511936068534851,
+ "learning_rate": 0.00019850795744671116,
+ "loss": 1.9416,
+ "step": 165
+ },
+ {
+ "epoch": 0.16604670063455346,
+ "grad_norm": 0.9425658583641052,
+ "learning_rate": 0.0001984898630728527,
+ "loss": 1.9081,
+ "step": 166
+ },
+ {
+ "epoch": 0.1670469819636773,
+ "grad_norm": 0.7502055168151855,
+ "learning_rate": 0.0001984716604764466,
+ "loss": 1.703,
+ "step": 167
+ },
+ {
+ "epoch": 0.1680472632928011,
+ "grad_norm": 0.9135978817939758,
+ "learning_rate": 0.0001984533496774942,
+ "loss": 1.7641,
+ "step": 168
+ },
+ {
+ "epoch": 0.16904754462192492,
+ "grad_norm": 0.7768126726150513,
+ "learning_rate": 0.0001984349306961158,
+ "loss": 1.7053,
+ "step": 169
+ },
+ {
+ "epoch": 0.17004782595104873,
+ "grad_norm": 0.8106538653373718,
+ "learning_rate": 0.00019841640355255043,
+ "loss": 1.8646,
+ "step": 170
+ },
+ {
+ "epoch": 0.17104810728017256,
+ "grad_norm": 0.7872330546379089,
+ "learning_rate": 0.00019839776826715614,
+ "loss": 1.7814,
+ "step": 171
+ },
+ {
+ "epoch": 0.17204838860929636,
+ "grad_norm": 0.869532585144043,
+ "learning_rate": 0.00019837902486040978,
+ "loss": 1.7812,
+ "step": 172
+ },
+ {
+ "epoch": 0.1730486699384202,
+ "grad_norm": 1.015028715133667,
+ "learning_rate": 0.0001983601733529069,
+ "loss": 1.9432,
+ "step": 173
+ },
+ {
+ "epoch": 0.174048951267544,
+ "grad_norm": 0.800183117389679,
+ "learning_rate": 0.00019834121376536187,
+ "loss": 1.758,
+ "step": 174
+ },
+ {
+ "epoch": 0.17504923259666783,
+ "grad_norm": 0.7427104711532593,
+ "learning_rate": 0.00019832214611860793,
+ "loss": 1.6476,
+ "step": 175
+ },
+ {
+ "epoch": 0.17604951392579163,
+ "grad_norm": 0.8289130926132202,
+ "learning_rate": 0.00019830297043359692,
+ "loss": 1.7702,
+ "step": 176
+ },
+ {
+ "epoch": 0.17704979525491543,
+ "grad_norm": 0.8298771977424622,
+ "learning_rate": 0.00019828368673139947,
+ "loss": 1.7515,
+ "step": 177
+ },
+ {
+ "epoch": 0.17805007658403926,
+ "grad_norm": 0.7602815628051758,
+ "learning_rate": 0.0001982642950332049,
+ "loss": 1.7545,
+ "step": 178
+ },
+ {
+ "epoch": 0.17905035791316307,
+ "grad_norm": 0.8110321164131165,
+ "learning_rate": 0.00019824479536032112,
+ "loss": 2.2604,
+ "step": 179
+ },
+ {
+ "epoch": 0.1800506392422869,
+ "grad_norm": 0.882273256778717,
+ "learning_rate": 0.0001982251877341748,
+ "loss": 1.8133,
+ "step": 180
+ },
+ {
+ "epoch": 0.1810509205714107,
+ "grad_norm": 0.9015639424324036,
+ "learning_rate": 0.00019820547217631117,
+ "loss": 1.7282,
+ "step": 181
+ },
+ {
+ "epoch": 0.18205120190053453,
+ "grad_norm": 0.9231659173965454,
+ "learning_rate": 0.00019818564870839405,
+ "loss": 1.9094,
+ "step": 182
+ },
+ {
+ "epoch": 0.18305148322965833,
+ "grad_norm": 0.8110967874526978,
+ "learning_rate": 0.00019816571735220583,
+ "loss": 1.886,
+ "step": 183
+ },
+ {
+ "epoch": 0.18405176455878217,
+ "grad_norm": 0.7670036554336548,
+ "learning_rate": 0.00019814567812964748,
+ "loss": 1.9895,
+ "step": 184
+ },
+ {
+ "epoch": 0.18505204588790597,
+ "grad_norm": 0.7955975532531738,
+ "learning_rate": 0.00019812553106273847,
+ "loss": 1.8127,
+ "step": 185
+ },
+ {
+ "epoch": 0.1860523272170298,
+ "grad_norm": 0.8790062665939331,
+ "learning_rate": 0.00019810527617361681,
+ "loss": 1.8899,
+ "step": 186
+ },
+ {
+ "epoch": 0.1870526085461536,
+ "grad_norm": 0.8818586468696594,
+ "learning_rate": 0.00019808491348453894,
+ "loss": 1.7707,
+ "step": 187
+ },
+ {
+ "epoch": 0.18805288987527743,
+ "grad_norm": 0.746442437171936,
+ "learning_rate": 0.00019806444301787978,
+ "loss": 1.7281,
+ "step": 188
+ },
+ {
+ "epoch": 0.18905317120440124,
+ "grad_norm": 0.7786905169487,
+ "learning_rate": 0.0001980438647961327,
+ "loss": 1.7317,
+ "step": 189
+ },
+ {
+ "epoch": 0.19005345253352504,
+ "grad_norm": 0.9338862299919128,
+ "learning_rate": 0.00019802317884190935,
+ "loss": 1.9548,
+ "step": 190
+ },
+ {
+ "epoch": 0.19105373386264887,
+ "grad_norm": 0.7416581511497498,
+ "learning_rate": 0.00019800238517793996,
+ "loss": 1.8601,
+ "step": 191
+ },
+ {
+ "epoch": 0.19205401519177268,
+ "grad_norm": 0.6782898902893066,
+ "learning_rate": 0.00019798148382707296,
+ "loss": 1.8477,
+ "step": 192
+ },
+ {
+ "epoch": 0.1930542965208965,
+ "grad_norm": 0.7389237880706787,
+ "learning_rate": 0.00019796047481227515,
+ "loss": 1.7749,
+ "step": 193
+ },
+ {
+ "epoch": 0.1940545778500203,
+ "grad_norm": 0.9711095094680786,
+ "learning_rate": 0.00019793935815663163,
+ "loss": 2.0899,
+ "step": 194
+ },
+ {
+ "epoch": 0.19505485917914414,
+ "grad_norm": 0.7949391007423401,
+ "learning_rate": 0.00019791813388334581,
+ "loss": 1.8778,
+ "step": 195
+ },
+ {
+ "epoch": 0.19605514050826794,
+ "grad_norm": 0.8871057033538818,
+ "learning_rate": 0.00019789680201573933,
+ "loss": 1.7511,
+ "step": 196
+ },
+ {
+ "epoch": 0.19705542183739178,
+ "grad_norm": 0.8664624094963074,
+ "learning_rate": 0.00019787536257725202,
+ "loss": 1.7232,
+ "step": 197
+ },
+ {
+ "epoch": 0.19805570316651558,
+ "grad_norm": 0.871658980846405,
+ "learning_rate": 0.00019785381559144196,
+ "loss": 1.7987,
+ "step": 198
+ },
+ {
+ "epoch": 0.1990559844956394,
+ "grad_norm": 0.7748361229896545,
+ "learning_rate": 0.00019783216108198542,
+ "loss": 1.9239,
+ "step": 199
+ },
+ {
+ "epoch": 0.2000562658247632,
+ "grad_norm": 0.9393408298492432,
+ "learning_rate": 0.00019781039907267677,
+ "loss": 2.0936,
+ "step": 200
+ },
+ {
+ "epoch": 0.20105654715388704,
+ "grad_norm": 0.8519601225852966,
+ "learning_rate": 0.00019778852958742853,
+ "loss": 1.9108,
+ "step": 201
+ },
+ {
+ "epoch": 0.20205682848301085,
+ "grad_norm": 0.8464863300323486,
+ "learning_rate": 0.00019776655265027127,
+ "loss": 1.897,
+ "step": 202
+ },
+ {
+ "epoch": 0.20305710981213465,
+ "grad_norm": 0.8933351635932922,
+ "learning_rate": 0.00019774446828535371,
+ "loss": 1.8204,
+ "step": 203
+ },
+ {
+ "epoch": 0.20405739114125848,
+ "grad_norm": 0.8305785059928894,
+ "learning_rate": 0.00019772227651694256,
+ "loss": 1.9135,
+ "step": 204
+ },
+ {
+ "epoch": 0.20505767247038229,
+ "grad_norm": 0.8117037415504456,
+ "learning_rate": 0.00019769997736942258,
+ "loss": 1.7585,
+ "step": 205
+ },
+ {
+ "epoch": 0.20605795379950612,
+ "grad_norm": 0.7570348381996155,
+ "learning_rate": 0.00019767757086729647,
+ "loss": 1.8373,
+ "step": 206
+ },
+ {
+ "epoch": 0.20705823512862992,
+ "grad_norm": 0.9291234016418457,
+ "learning_rate": 0.00019765505703518496,
+ "loss": 1.7774,
+ "step": 207
+ },
+ {
+ "epoch": 0.20805851645775375,
+ "grad_norm": 0.8211004137992859,
+ "learning_rate": 0.00019763243589782662,
+ "loss": 1.8766,
+ "step": 208
+ },
+ {
+ "epoch": 0.20905879778687755,
+ "grad_norm": 0.6625431180000305,
+ "learning_rate": 0.00019760970748007803,
+ "loss": 1.628,
+ "step": 209
+ },
+ {
+ "epoch": 0.21005907911600138,
+ "grad_norm": 0.7974782586097717,
+ "learning_rate": 0.0001975868718069136,
+ "loss": 1.6896,
+ "step": 210
+ },
+ {
+ "epoch": 0.2110593604451252,
+ "grad_norm": 0.8364912867546082,
+ "learning_rate": 0.00019756392890342563,
+ "loss": 1.7492,
+ "step": 211
+ },
+ {
+ "epoch": 0.21205964177424902,
+ "grad_norm": 0.8730652332305908,
+ "learning_rate": 0.00019754087879482422,
+ "loss": 1.8295,
+ "step": 212
+ },
+ {
+ "epoch": 0.21305992310337282,
+ "grad_norm": 0.7532863020896912,
+ "learning_rate": 0.00019751772150643722,
+ "loss": 1.8309,
+ "step": 213
+ },
+ {
+ "epoch": 0.21406020443249665,
+ "grad_norm": 0.7375178933143616,
+ "learning_rate": 0.00019749445706371038,
+ "loss": 1.7854,
+ "step": 214
+ },
+ {
+ "epoch": 0.21506048576162046,
+ "grad_norm": 0.7524377703666687,
+ "learning_rate": 0.00019747108549220702,
+ "loss": 1.7683,
+ "step": 215
+ },
+ {
+ "epoch": 0.21606076709074426,
+ "grad_norm": 0.7331809997558594,
+ "learning_rate": 0.00019744760681760832,
+ "loss": 1.7103,
+ "step": 216
+ },
+ {
+ "epoch": 0.2170610484198681,
+ "grad_norm": 0.8083691596984863,
+ "learning_rate": 0.00019742402106571314,
+ "loss": 1.674,
+ "step": 217
+ },
+ {
+ "epoch": 0.2180613297489919,
+ "grad_norm": 0.8524570465087891,
+ "learning_rate": 0.00019740032826243788,
+ "loss": 1.7227,
+ "step": 218
+ },
+ {
+ "epoch": 0.21906161107811573,
+ "grad_norm": 0.7676658630371094,
+ "learning_rate": 0.0001973765284338167,
+ "loss": 1.8561,
+ "step": 219
+ },
+ {
+ "epoch": 0.22006189240723953,
+ "grad_norm": 0.7858710289001465,
+ "learning_rate": 0.00019735262160600127,
+ "loss": 1.7796,
+ "step": 220
+ },
+ {
+ "epoch": 0.22106217373636336,
+ "grad_norm": 0.7587497234344482,
+ "learning_rate": 0.00019732860780526088,
+ "loss": 1.9271,
+ "step": 221
+ },
+ {
+ "epoch": 0.22206245506548716,
+ "grad_norm": 0.8084688186645508,
+ "learning_rate": 0.00019730448705798239,
+ "loss": 1.8176,
+ "step": 222
+ },
+ {
+ "epoch": 0.223062736394611,
+ "grad_norm": 0.6736906170845032,
+ "learning_rate": 0.00019728025939067008,
+ "loss": 1.6288,
+ "step": 223
+ },
+ {
+ "epoch": 0.2240630177237348,
+ "grad_norm": 0.7483925819396973,
+ "learning_rate": 0.00019725592482994583,
+ "loss": 1.8363,
+ "step": 224
+ },
+ {
+ "epoch": 0.22506329905285863,
+ "grad_norm": 1.7995796203613281,
+ "learning_rate": 0.00019723148340254892,
+ "loss": 1.9072,
+ "step": 225
+ },
+ {
+ "epoch": 0.22606358038198243,
+ "grad_norm": 0.8028881549835205,
+ "learning_rate": 0.00019720693513533598,
+ "loss": 1.9021,
+ "step": 226
+ },
+ {
+ "epoch": 0.22706386171110624,
+ "grad_norm": 0.9853909015655518,
+ "learning_rate": 0.00019718228005528122,
+ "loss": 2.0159,
+ "step": 227
+ },
+ {
+ "epoch": 0.22806414304023007,
+ "grad_norm": 0.7784947156906128,
+ "learning_rate": 0.00019715751818947603,
+ "loss": 1.7816,
+ "step": 228
+ },
+ {
+ "epoch": 0.22906442436935387,
+ "grad_norm": 0.7447614669799805,
+ "learning_rate": 0.0001971326495651293,
+ "loss": 1.654,
+ "step": 229
+ },
+ {
+ "epoch": 0.2300647056984777,
+ "grad_norm": 0.8673064112663269,
+ "learning_rate": 0.00019710767420956705,
+ "loss": 2.0049,
+ "step": 230
+ },
+ {
+ "epoch": 0.2310649870276015,
+ "grad_norm": 0.8207747936248779,
+ "learning_rate": 0.0001970825921502328,
+ "loss": 1.9388,
+ "step": 231
+ },
+ {
+ "epoch": 0.23206526835672533,
+ "grad_norm": 0.742266058921814,
+ "learning_rate": 0.0001970574034146871,
+ "loss": 1.7658,
+ "step": 232
+ },
+ {
+ "epoch": 0.23306554968584914,
+ "grad_norm": 0.9097973704338074,
+ "learning_rate": 0.00019703210803060782,
+ "loss": 1.8023,
+ "step": 233
+ },
+ {
+ "epoch": 0.23406583101497297,
+ "grad_norm": 0.7512438297271729,
+ "learning_rate": 0.00019700670602579008,
+ "loss": 1.8551,
+ "step": 234
+ },
+ {
+ "epoch": 0.23506611234409677,
+ "grad_norm": 0.8303943872451782,
+ "learning_rate": 0.00019698119742814606,
+ "loss": 1.7723,
+ "step": 235
+ },
+ {
+ "epoch": 0.2360663936732206,
+ "grad_norm": 0.9195139408111572,
+ "learning_rate": 0.00019695558226570507,
+ "loss": 1.6426,
+ "step": 236
+ },
+ {
+ "epoch": 0.2370666750023444,
+ "grad_norm": 0.7734714150428772,
+ "learning_rate": 0.00019692986056661356,
+ "loss": 1.7798,
+ "step": 237
+ },
+ {
+ "epoch": 0.23806695633146824,
+ "grad_norm": 0.8759648203849792,
+ "learning_rate": 0.00019690403235913504,
+ "loss": 1.6465,
+ "step": 238
+ },
+ {
+ "epoch": 0.23906723766059204,
+ "grad_norm": 0.7688003778457642,
+ "learning_rate": 0.00019687809767165,
+ "loss": 2.0092,
+ "step": 239
+ },
+ {
+ "epoch": 0.24006751898971584,
+ "grad_norm": 0.7398790121078491,
+ "learning_rate": 0.000196852056532656,
+ "loss": 1.8176,
+ "step": 240
+ },
+ {
+ "epoch": 0.24106780031883968,
+ "grad_norm": 0.8921257853507996,
+ "learning_rate": 0.00019682590897076752,
+ "loss": 1.7387,
+ "step": 241
+ },
+ {
+ "epoch": 0.24206808164796348,
+ "grad_norm": 0.7939002513885498,
+ "learning_rate": 0.00019679965501471608,
+ "loss": 1.9417,
+ "step": 242
+ },
+ {
+ "epoch": 0.2430683629770873,
+ "grad_norm": 0.7798025608062744,
+ "learning_rate": 0.0001967732946933499,
+ "loss": 1.7134,
+ "step": 243
+ },
+ {
+ "epoch": 0.2440686443062111,
+ "grad_norm": 0.8007254600524902,
+ "learning_rate": 0.00019674682803563428,
+ "loss": 1.7387,
+ "step": 244
+ },
+ {
+ "epoch": 0.24506892563533494,
+ "grad_norm": 0.6257696151733398,
+ "learning_rate": 0.00019672025507065131,
+ "loss": 1.767,
+ "step": 245
+ },
+ {
+ "epoch": 0.24606920696445875,
+ "grad_norm": 0.7942785620689392,
+ "learning_rate": 0.00019669357582759983,
+ "loss": 1.8801,
+ "step": 246
+ },
+ {
+ "epoch": 0.24706948829358258,
+ "grad_norm": 0.7933829426765442,
+ "learning_rate": 0.00019666679033579552,
+ "loss": 1.9711,
+ "step": 247
+ },
+ {
+ "epoch": 0.24806976962270638,
+ "grad_norm": 0.7489326596260071,
+ "learning_rate": 0.00019663989862467082,
+ "loss": 1.8038,
+ "step": 248
+ },
+ {
+ "epoch": 0.2490700509518302,
+ "grad_norm": 0.7279101014137268,
+ "learning_rate": 0.00019661290072377482,
+ "loss": 1.66,
+ "step": 249
+ },
+ {
+ "epoch": 0.25007033228095404,
+ "grad_norm": 0.6823874115943909,
+ "learning_rate": 0.00019658579666277334,
+ "loss": 1.8064,
+ "step": 250
+ },
+ {
+ "epoch": 0.2510706136100778,
+ "grad_norm": 0.6561273336410522,
+ "learning_rate": 0.0001965585864714488,
+ "loss": 1.6874,
+ "step": 251
+ },
+ {
+ "epoch": 0.25207089493920165,
+ "grad_norm": 0.6457573175430298,
+ "learning_rate": 0.00019653127017970034,
+ "loss": 1.4587,
+ "step": 252
+ },
+ {
+ "epoch": 0.2530711762683255,
+ "grad_norm": 0.7649476528167725,
+ "learning_rate": 0.0001965038478175436,
+ "loss": 1.9811,
+ "step": 253
+ },
+ {
+ "epoch": 0.25407145759744926,
+ "grad_norm": 0.8786829710006714,
+ "learning_rate": 0.00019647631941511082,
+ "loss": 1.8629,
+ "step": 254
+ },
+ {
+ "epoch": 0.2550717389265731,
+ "grad_norm": 0.7038159966468811,
+ "learning_rate": 0.0001964486850026507,
+ "loss": 1.6885,
+ "step": 255
+ },
+ {
+ "epoch": 0.2560720202556969,
+ "grad_norm": 0.7255909442901611,
+ "learning_rate": 0.00019642094461052852,
+ "loss": 1.7335,
+ "step": 256
+ },
+ {
+ "epoch": 0.25707230158482075,
+ "grad_norm": 0.7780727744102478,
+ "learning_rate": 0.00019639309826922585,
+ "loss": 1.899,
+ "step": 257
+ },
+ {
+ "epoch": 0.2580725829139445,
+ "grad_norm": 0.8533650040626526,
+ "learning_rate": 0.0001963651460093409,
+ "loss": 1.7711,
+ "step": 258
+ },
+ {
+ "epoch": 0.25907286424306836,
+ "grad_norm": 0.6440068483352661,
+ "learning_rate": 0.00019633708786158806,
+ "loss": 1.6685,
+ "step": 259
+ },
+ {
+ "epoch": 0.2600731455721922,
+ "grad_norm": 0.6873877048492432,
+ "learning_rate": 0.00019630892385679818,
+ "loss": 1.7502,
+ "step": 260
+ },
+ {
+ "epoch": 0.261073426901316,
+ "grad_norm": 0.7100672721862793,
+ "learning_rate": 0.00019628065402591845,
+ "loss": 1.7789,
+ "step": 261
+ },
+ {
+ "epoch": 0.2620737082304398,
+ "grad_norm": 0.8447420001029968,
+ "learning_rate": 0.00019625227840001225,
+ "loss": 1.8577,
+ "step": 262
+ },
+ {
+ "epoch": 0.2630739895595636,
+ "grad_norm": 0.767888605594635,
+ "learning_rate": 0.0001962237970102593,
+ "loss": 1.5936,
+ "step": 263
+ },
+ {
+ "epoch": 0.26407427088868746,
+ "grad_norm": 0.6955805420875549,
+ "learning_rate": 0.0001961952098879555,
+ "loss": 1.7733,
+ "step": 264
+ },
+ {
+ "epoch": 0.26507455221781123,
+ "grad_norm": 0.777740478515625,
+ "learning_rate": 0.00019616651706451287,
+ "loss": 1.6027,
+ "step": 265
+ },
+ {
+ "epoch": 0.26607483354693506,
+ "grad_norm": 0.7691099047660828,
+ "learning_rate": 0.0001961377185714597,
+ "loss": 1.7457,
+ "step": 266
+ },
+ {
+ "epoch": 0.2670751148760589,
+ "grad_norm": 0.6778420805931091,
+ "learning_rate": 0.0001961088144404403,
+ "loss": 1.7704,
+ "step": 267
+ },
+ {
+ "epoch": 0.2680753962051827,
+ "grad_norm": 0.7943267226219177,
+ "learning_rate": 0.00019607980470321505,
+ "loss": 1.9775,
+ "step": 268
+ },
+ {
+ "epoch": 0.2690756775343065,
+ "grad_norm": 0.6660135388374329,
+ "learning_rate": 0.00019605068939166045,
+ "loss": 1.6556,
+ "step": 269
+ },
+ {
+ "epoch": 0.27007595886343033,
+ "grad_norm": 0.8664935827255249,
+ "learning_rate": 0.00019602146853776894,
+ "loss": 2.03,
+ "step": 270
+ },
+ {
+ "epoch": 0.27107624019255416,
+ "grad_norm": 0.7783074975013733,
+ "learning_rate": 0.000195992142173649,
+ "loss": 1.7426,
+ "step": 271
+ },
+ {
+ "epoch": 0.272076521521678,
+ "grad_norm": 0.7470223903656006,
+ "learning_rate": 0.0001959627103315249,
+ "loss": 1.7284,
+ "step": 272
+ },
+ {
+ "epoch": 0.27307680285080177,
+ "grad_norm": 0.7284931540489197,
+ "learning_rate": 0.00019593317304373705,
+ "loss": 1.6977,
+ "step": 273
+ },
+ {
+ "epoch": 0.2740770841799256,
+ "grad_norm": 0.7201762795448303,
+ "learning_rate": 0.00019590353034274144,
+ "loss": 1.7184,
+ "step": 274
+ },
+ {
+ "epoch": 0.27507736550904943,
+ "grad_norm": 0.6756151914596558,
+ "learning_rate": 0.00019587378226111014,
+ "loss": 1.7276,
+ "step": 275
+ },
+ {
+ "epoch": 0.27607764683817326,
+ "grad_norm": 0.6784201860427856,
+ "learning_rate": 0.00019584392883153088,
+ "loss": 1.642,
+ "step": 276
+ },
+ {
+ "epoch": 0.27707792816729704,
+ "grad_norm": 0.7387176752090454,
+ "learning_rate": 0.00019581397008680717,
+ "loss": 1.7911,
+ "step": 277
+ },
+ {
+ "epoch": 0.27807820949642087,
+ "grad_norm": 0.9367021918296814,
+ "learning_rate": 0.00019578390605985826,
+ "loss": 2.0034,
+ "step": 278
+ },
+ {
+ "epoch": 0.2790784908255447,
+ "grad_norm": 0.803698718547821,
+ "learning_rate": 0.00019575373678371909,
+ "loss": 1.7907,
+ "step": 279
+ },
+ {
+ "epoch": 0.2800787721546685,
+ "grad_norm": 0.7324479818344116,
+ "learning_rate": 0.00019572346229154025,
+ "loss": 1.5539,
+ "step": 280
+ },
+ {
+ "epoch": 0.2810790534837923,
+ "grad_norm": 0.7107382416725159,
+ "learning_rate": 0.00019569308261658787,
+ "loss": 1.838,
+ "step": 281
+ },
+ {
+ "epoch": 0.28207933481291614,
+ "grad_norm": 0.8698626756668091,
+ "learning_rate": 0.00019566259779224378,
+ "loss": 1.7433,
+ "step": 282
+ },
+ {
+ "epoch": 0.28307961614203997,
+ "grad_norm": 0.7804028391838074,
+ "learning_rate": 0.00019563200785200526,
+ "loss": 1.7161,
+ "step": 283
+ },
+ {
+ "epoch": 0.28407989747116374,
+ "grad_norm": 0.8762909173965454,
+ "learning_rate": 0.00019560131282948516,
+ "loss": 1.8031,
+ "step": 284
+ },
+ {
+ "epoch": 0.2850801788002876,
+ "grad_norm": 0.8252436518669128,
+ "learning_rate": 0.0001955705127584117,
+ "loss": 1.6434,
+ "step": 285
+ },
+ {
+ "epoch": 0.2860804601294114,
+ "grad_norm": 0.8220797181129456,
+ "learning_rate": 0.00019553960767262863,
+ "loss": 1.8522,
+ "step": 286
+ },
+ {
+ "epoch": 0.28708074145853524,
+ "grad_norm": 0.7883003950119019,
+ "learning_rate": 0.00019550859760609503,
+ "loss": 1.8245,
+ "step": 287
+ },
+ {
+ "epoch": 0.288081022787659,
+ "grad_norm": 0.9208703637123108,
+ "learning_rate": 0.00019547748259288536,
+ "loss": 1.8877,
+ "step": 288
+ },
+ {
+ "epoch": 0.28908130411678284,
+ "grad_norm": 0.8452202677726746,
+ "learning_rate": 0.0001954462626671894,
+ "loss": 1.554,
+ "step": 289
+ },
+ {
+ "epoch": 0.2900815854459067,
+ "grad_norm": 0.82865971326828,
+ "learning_rate": 0.0001954149378633122,
+ "loss": 1.655,
+ "step": 290
+ },
+ {
+ "epoch": 0.29108186677503045,
+ "grad_norm": 0.7871205806732178,
+ "learning_rate": 0.00019538350821567404,
+ "loss": 1.621,
+ "step": 291
+ },
+ {
+ "epoch": 0.2920821481041543,
+ "grad_norm": 0.8288848996162415,
+ "learning_rate": 0.00019535197375881045,
+ "loss": 1.9277,
+ "step": 292
+ },
+ {
+ "epoch": 0.2930824294332781,
+ "grad_norm": 0.7275516986846924,
+ "learning_rate": 0.00019532033452737205,
+ "loss": 1.7949,
+ "step": 293
+ },
+ {
+ "epoch": 0.29408271076240194,
+ "grad_norm": 0.7424570322036743,
+ "learning_rate": 0.00019528859055612468,
+ "loss": 1.6407,
+ "step": 294
+ },
+ {
+ "epoch": 0.2950829920915257,
+ "grad_norm": 0.7031363248825073,
+ "learning_rate": 0.0001952567418799492,
+ "loss": 1.8793,
+ "step": 295
+ },
+ {
+ "epoch": 0.29608327342064955,
+ "grad_norm": 0.7190185189247131,
+ "learning_rate": 0.00019522478853384155,
+ "loss": 1.6759,
+ "step": 296
+ },
+ {
+ "epoch": 0.2970835547497734,
+ "grad_norm": 0.7270736694335938,
+ "learning_rate": 0.00019519273055291266,
+ "loss": 1.6351,
+ "step": 297
+ },
+ {
+ "epoch": 0.2980838360788972,
+ "grad_norm": 0.8894152641296387,
+ "learning_rate": 0.00019516056797238846,
+ "loss": 1.7908,
+ "step": 298
+ },
+ {
+ "epoch": 0.299084117408021,
+ "grad_norm": 0.9089106321334839,
+ "learning_rate": 0.00019512830082760987,
+ "loss": 1.6018,
+ "step": 299
+ },
+ {
+ "epoch": 0.3000843987371448,
+ "grad_norm": 0.8772429823875427,
+ "learning_rate": 0.00019509592915403255,
+ "loss": 1.8474,
+ "step": 300
+ },
+ {
+ "epoch": 0.30108468006626865,
+ "grad_norm": 0.8244933485984802,
+ "learning_rate": 0.00019506345298722717,
+ "loss": 1.4324,
+ "step": 301
+ },
+ {
+ "epoch": 0.3020849613953925,
+ "grad_norm": 0.7283012866973877,
+ "learning_rate": 0.00019503087236287913,
+ "loss": 1.5115,
+ "step": 302
+ },
+ {
+ "epoch": 0.30308524272451626,
+ "grad_norm": 0.7721333503723145,
+ "learning_rate": 0.00019499818731678873,
+ "loss": 1.6728,
+ "step": 303
+ },
+ {
+ "epoch": 0.3040855240536401,
+ "grad_norm": 0.7579306960105896,
+ "learning_rate": 0.00019496539788487082,
+ "loss": 1.5927,
+ "step": 304
+ },
+ {
+ "epoch": 0.3050858053827639,
+ "grad_norm": 0.9054704308509827,
+ "learning_rate": 0.0001949325041031551,
+ "loss": 1.9027,
+ "step": 305
+ },
+ {
+ "epoch": 0.3060860867118877,
+ "grad_norm": 0.7023262977600098,
+ "learning_rate": 0.0001948995060077859,
+ "loss": 1.7705,
+ "step": 306
+ },
+ {
+ "epoch": 0.3070863680410115,
+ "grad_norm": 0.7942065000534058,
+ "learning_rate": 0.0001948664036350221,
+ "loss": 1.8269,
+ "step": 307
+ },
+ {
+ "epoch": 0.30808664937013536,
+ "grad_norm": 0.9305068850517273,
+ "learning_rate": 0.00019483319702123732,
+ "loss": 1.8247,
+ "step": 308
+ },
+ {
+ "epoch": 0.3090869306992592,
+ "grad_norm": 0.814664900302887,
+ "learning_rate": 0.00019479988620291956,
+ "loss": 1.9179,
+ "step": 309
+ },
+ {
+ "epoch": 0.31008721202838296,
+ "grad_norm": 0.6418014764785767,
+ "learning_rate": 0.00019476647121667137,
+ "loss": 1.5011,
+ "step": 310
+ },
+ {
+ "epoch": 0.3110874933575068,
+ "grad_norm": 0.7911447882652283,
+ "learning_rate": 0.00019473295209920983,
+ "loss": 1.857,
+ "step": 311
+ },
+ {
+ "epoch": 0.3120877746866306,
+ "grad_norm": 0.7792949676513672,
+ "learning_rate": 0.00019469932888736632,
+ "loss": 1.7279,
+ "step": 312
+ },
+ {
+ "epoch": 0.31308805601575446,
+ "grad_norm": 0.7579171657562256,
+ "learning_rate": 0.00019466560161808674,
+ "loss": 1.6902,
+ "step": 313
+ },
+ {
+ "epoch": 0.31408833734487823,
+ "grad_norm": 0.7052372694015503,
+ "learning_rate": 0.00019463177032843124,
+ "loss": 1.7302,
+ "step": 314
+ },
+ {
+ "epoch": 0.31508861867400206,
+ "grad_norm": 0.7188624143600464,
+ "learning_rate": 0.00019459783505557424,
+ "loss": 1.7338,
+ "step": 315
+ },
+ {
+ "epoch": 0.3160889000031259,
+ "grad_norm": 0.6057978272438049,
+ "learning_rate": 0.00019456379583680452,
+ "loss": 1.6123,
+ "step": 316
+ },
+ {
+ "epoch": 0.31708918133224967,
+ "grad_norm": 0.8339365720748901,
+ "learning_rate": 0.000194529652709525,
+ "loss": 1.9765,
+ "step": 317
+ },
+ {
+ "epoch": 0.3180894626613735,
+ "grad_norm": 0.8524260520935059,
+ "learning_rate": 0.00019449540571125286,
+ "loss": 1.6803,
+ "step": 318
+ },
+ {
+ "epoch": 0.31908974399049733,
+ "grad_norm": 0.7035975456237793,
+ "learning_rate": 0.00019446105487961926,
+ "loss": 1.5792,
+ "step": 319
+ },
+ {
+ "epoch": 0.32009002531962116,
+ "grad_norm": 0.7894249558448792,
+ "learning_rate": 0.0001944266002523696,
+ "loss": 1.6326,
+ "step": 320
+ },
+ {
+ "epoch": 0.32109030664874494,
+ "grad_norm": 0.7716989517211914,
+ "learning_rate": 0.0001943920418673633,
+ "loss": 1.6871,
+ "step": 321
+ },
+ {
+ "epoch": 0.32209058797786877,
+ "grad_norm": 0.7914933562278748,
+ "learning_rate": 0.00019435737976257377,
+ "loss": 1.7148,
+ "step": 322
+ },
+ {
+ "epoch": 0.3230908693069926,
+ "grad_norm": 0.7113205790519714,
+ "learning_rate": 0.00019432261397608834,
+ "loss": 1.5236,
+ "step": 323
+ },
+ {
+ "epoch": 0.32409115063611643,
+ "grad_norm": 0.8609917163848877,
+ "learning_rate": 0.00019428774454610843,
+ "loss": 1.8101,
+ "step": 324
+ },
+ {
+ "epoch": 0.3250914319652402,
+ "grad_norm": 0.7319685220718384,
+ "learning_rate": 0.00019425277151094913,
+ "loss": 1.7712,
+ "step": 325
+ },
+ {
+ "epoch": 0.32609171329436404,
+ "grad_norm": 0.6478747725486755,
+ "learning_rate": 0.00019421769490903957,
+ "loss": 1.8535,
+ "step": 326
+ },
+ {
+ "epoch": 0.32709199462348787,
+ "grad_norm": 0.7025763392448425,
+ "learning_rate": 0.0001941825147789225,
+ "loss": 1.9213,
+ "step": 327
+ },
+ {
+ "epoch": 0.32809227595261165,
+ "grad_norm": 0.7595239877700806,
+ "learning_rate": 0.00019414723115925456,
+ "loss": 1.7449,
+ "step": 328
+ },
+ {
+ "epoch": 0.3290925572817355,
+ "grad_norm": 0.7728105783462524,
+ "learning_rate": 0.0001941118440888061,
+ "loss": 1.8821,
+ "step": 329
+ },
+ {
+ "epoch": 0.3300928386108593,
+ "grad_norm": 0.7430977821350098,
+ "learning_rate": 0.0001940763536064611,
+ "loss": 1.6904,
+ "step": 330
+ },
+ {
+ "epoch": 0.33109311993998314,
+ "grad_norm": 0.7909367680549622,
+ "learning_rate": 0.00019404075975121716,
+ "loss": 1.7899,
+ "step": 331
+ },
+ {
+ "epoch": 0.3320934012691069,
+ "grad_norm": 0.7561226487159729,
+ "learning_rate": 0.0001940050625621855,
+ "loss": 1.7746,
+ "step": 332
+ },
+ {
+ "epoch": 0.33309368259823074,
+ "grad_norm": 0.7602452635765076,
+ "learning_rate": 0.00019396926207859084,
+ "loss": 1.7909,
+ "step": 333
+ },
+ {
+ "epoch": 0.3340939639273546,
+ "grad_norm": 0.8194379806518555,
+ "learning_rate": 0.0001939333583397715,
+ "loss": 1.7039,
+ "step": 334
+ },
+ {
+ "epoch": 0.3350942452564784,
+ "grad_norm": 0.7036342024803162,
+ "learning_rate": 0.00019389735138517915,
+ "loss": 1.6663,
+ "step": 335
+ },
+ {
+ "epoch": 0.3360945265856022,
+ "grad_norm": 0.8429521918296814,
+ "learning_rate": 0.00019386124125437895,
+ "loss": 1.589,
+ "step": 336
+ },
+ {
+ "epoch": 0.337094807914726,
+ "grad_norm": 0.7271071076393127,
+ "learning_rate": 0.00019382502798704935,
+ "loss": 1.646,
+ "step": 337
+ },
+ {
+ "epoch": 0.33809508924384984,
+ "grad_norm": 0.7862086892127991,
+ "learning_rate": 0.00019378871162298227,
+ "loss": 1.6085,
+ "step": 338
+ },
+ {
+ "epoch": 0.3390953705729737,
+ "grad_norm": 0.676815390586853,
+ "learning_rate": 0.00019375229220208276,
+ "loss": 1.7335,
+ "step": 339
+ },
+ {
+ "epoch": 0.34009565190209745,
+ "grad_norm": 0.8916042447090149,
+ "learning_rate": 0.00019371576976436917,
+ "loss": 1.7914,
+ "step": 340
+ },
+ {
+ "epoch": 0.3410959332312213,
+ "grad_norm": 0.7913751006126404,
+ "learning_rate": 0.00019367914434997312,
+ "loss": 1.6031,
+ "step": 341
+ },
+ {
+ "epoch": 0.3420962145603451,
+ "grad_norm": 0.7409866452217102,
+ "learning_rate": 0.00019364241599913924,
+ "loss": 1.6525,
+ "step": 342
+ },
+ {
+ "epoch": 0.3430964958894689,
+ "grad_norm": 0.7472705841064453,
+ "learning_rate": 0.0001936055847522254,
+ "loss": 1.6716,
+ "step": 343
+ },
+ {
+ "epoch": 0.3440967772185927,
+ "grad_norm": 0.7030773758888245,
+ "learning_rate": 0.00019356865064970244,
+ "loss": 1.7134,
+ "step": 344
+ },
+ {
+ "epoch": 0.34509705854771655,
+ "grad_norm": 0.6609564423561096,
+ "learning_rate": 0.0001935316137321543,
+ "loss": 1.7127,
+ "step": 345
+ },
+ {
+ "epoch": 0.3460973398768404,
+ "grad_norm": 0.7811393141746521,
+ "learning_rate": 0.00019349447404027782,
+ "loss": 1.75,
+ "step": 346
+ },
+ {
+ "epoch": 0.34709762120596416,
+ "grad_norm": 0.6980521082878113,
+ "learning_rate": 0.00019345723161488283,
+ "loss": 1.82,
+ "step": 347
+ },
+ {
+ "epoch": 0.348097902535088,
+ "grad_norm": 0.749796986579895,
+ "learning_rate": 0.000193419886496892,
+ "loss": 1.9755,
+ "step": 348
+ },
+ {
+ "epoch": 0.3490981838642118,
+ "grad_norm": 0.9486667513847351,
+ "learning_rate": 0.00019338243872734086,
+ "loss": 1.7047,
+ "step": 349
+ },
+ {
+ "epoch": 0.35009846519333565,
+ "grad_norm": 0.8086081147193909,
+ "learning_rate": 0.00019334488834737775,
+ "loss": 1.661,
+ "step": 350
+ },
+ {
+ "epoch": 0.3510987465224594,
+ "grad_norm": 0.700549840927124,
+ "learning_rate": 0.00019330723539826375,
+ "loss": 1.8696,
+ "step": 351
+ },
+ {
+ "epoch": 0.35209902785158326,
+ "grad_norm": 0.7465476393699646,
+ "learning_rate": 0.00019326947992137262,
+ "loss": 1.5444,
+ "step": 352
+ },
+ {
+ "epoch": 0.3530993091807071,
+ "grad_norm": 0.7370999455451965,
+ "learning_rate": 0.00019323162195819082,
+ "loss": 1.8805,
+ "step": 353
+ },
+ {
+ "epoch": 0.35409959050983086,
+ "grad_norm": 0.719359278678894,
+ "learning_rate": 0.0001931936615503174,
+ "loss": 1.8022,
+ "step": 354
+ },
+ {
+ "epoch": 0.3550998718389547,
+ "grad_norm": 0.7301434278488159,
+ "learning_rate": 0.000193155598739464,
+ "loss": 1.6984,
+ "step": 355
+ },
+ {
+ "epoch": 0.3561001531680785,
+ "grad_norm": 0.7191399335861206,
+ "learning_rate": 0.0001931174335674547,
+ "loss": 1.7229,
+ "step": 356
+ },
+ {
+ "epoch": 0.35710043449720236,
+ "grad_norm": 0.7471932768821716,
+ "learning_rate": 0.0001930791660762262,
+ "loss": 1.7408,
+ "step": 357
+ },
+ {
+ "epoch": 0.35810071582632613,
+ "grad_norm": 0.8197934031486511,
+ "learning_rate": 0.00019304079630782752,
+ "loss": 1.6938,
+ "step": 358
+ },
+ {
+ "epoch": 0.35910099715544996,
+ "grad_norm": 0.7408166527748108,
+ "learning_rate": 0.0001930023243044201,
+ "loss": 1.7798,
+ "step": 359
+ },
+ {
+ "epoch": 0.3601012784845738,
+ "grad_norm": 0.7525373101234436,
+ "learning_rate": 0.00019296375010827773,
+ "loss": 1.711,
+ "step": 360
+ },
+ {
+ "epoch": 0.3611015598136976,
+ "grad_norm": 0.6712046265602112,
+ "learning_rate": 0.00019292507376178643,
+ "loss": 1.8157,
+ "step": 361
+ },
+ {
+ "epoch": 0.3621018411428214,
+ "grad_norm": 0.6712916493415833,
+ "learning_rate": 0.00019288629530744454,
+ "loss": 1.8707,
+ "step": 362
+ },
+ {
+ "epoch": 0.36310212247194523,
+ "grad_norm": 0.6127772331237793,
+ "learning_rate": 0.0001928474147878626,
+ "loss": 1.4743,
+ "step": 363
+ },
+ {
+ "epoch": 0.36410240380106906,
+ "grad_norm": 0.910310685634613,
+ "learning_rate": 0.0001928084322457632,
+ "loss": 1.7956,
+ "step": 364
+ },
+ {
+ "epoch": 0.3651026851301929,
+ "grad_norm": 0.6267688870429993,
+ "learning_rate": 0.00019276934772398114,
+ "loss": 1.4664,
+ "step": 365
+ },
+ {
+ "epoch": 0.36610296645931667,
+ "grad_norm": 0.8317943811416626,
+ "learning_rate": 0.00019273016126546323,
+ "loss": 1.853,
+ "step": 366
+ },
+ {
+ "epoch": 0.3671032477884405,
+ "grad_norm": 0.7581344842910767,
+ "learning_rate": 0.00019269087291326833,
+ "loss": 1.9236,
+ "step": 367
+ },
+ {
+ "epoch": 0.36810352911756433,
+ "grad_norm": 0.9311390519142151,
+ "learning_rate": 0.00019265148271056722,
+ "loss": 1.7019,
+ "step": 368
+ },
+ {
+ "epoch": 0.3691038104466881,
+ "grad_norm": 0.9513958096504211,
+ "learning_rate": 0.0001926119907006426,
+ "loss": 1.7617,
+ "step": 369
+ },
+ {
+ "epoch": 0.37010409177581194,
+ "grad_norm": 0.7407613396644592,
+ "learning_rate": 0.00019257239692688907,
+ "loss": 2.1057,
+ "step": 370
+ },
+ {
+ "epoch": 0.37110437310493577,
+ "grad_norm": 0.7530227899551392,
+ "learning_rate": 0.00019253270143281296,
+ "loss": 1.9844,
+ "step": 371
+ },
+ {
+ "epoch": 0.3721046544340596,
+ "grad_norm": 0.6733037233352661,
+ "learning_rate": 0.00019249290426203252,
+ "loss": 1.8939,
+ "step": 372
+ },
+ {
+ "epoch": 0.3731049357631834,
+ "grad_norm": 0.7037007808685303,
+ "learning_rate": 0.0001924530054582776,
+ "loss": 1.6897,
+ "step": 373
+ },
+ {
+ "epoch": 0.3741052170923072,
+ "grad_norm": 0.7689145803451538,
+ "learning_rate": 0.0001924130050653898,
+ "loss": 1.6544,
+ "step": 374
+ },
+ {
+ "epoch": 0.37510549842143104,
+ "grad_norm": 0.7100968360900879,
+ "learning_rate": 0.00019237290312732226,
+ "loss": 1.7932,
+ "step": 375
+ },
+ {
+ "epoch": 0.37610577975055487,
+ "grad_norm": 0.7645193338394165,
+ "learning_rate": 0.00019233269968813984,
+ "loss": 1.6576,
+ "step": 376
+ },
+ {
+ "epoch": 0.37710606107967864,
+ "grad_norm": 0.6831678152084351,
+ "learning_rate": 0.00019229239479201876,
+ "loss": 1.6675,
+ "step": 377
+ },
+ {
+ "epoch": 0.3781063424088025,
+ "grad_norm": 0.8677794337272644,
+ "learning_rate": 0.0001922519884832469,
+ "loss": 1.6064,
+ "step": 378
+ },
+ {
+ "epoch": 0.3791066237379263,
+ "grad_norm": 0.6727691888809204,
+ "learning_rate": 0.0001922114808062234,
+ "loss": 1.6249,
+ "step": 379
+ },
+ {
+ "epoch": 0.3801069050670501,
+ "grad_norm": 0.6588670611381531,
+ "learning_rate": 0.00019217087180545893,
+ "loss": 1.5541,
+ "step": 380
+ },
+ {
+ "epoch": 0.3811071863961739,
+ "grad_norm": 0.7666369080543518,
+ "learning_rate": 0.0001921301615255754,
+ "loss": 1.7806,
+ "step": 381
+ },
+ {
+ "epoch": 0.38210746772529774,
+ "grad_norm": 0.6465156078338623,
+ "learning_rate": 0.0001920893500113061,
+ "loss": 1.512,
+ "step": 382
+ },
+ {
+ "epoch": 0.3831077490544216,
+ "grad_norm": 0.7854346632957458,
+ "learning_rate": 0.00019204843730749547,
+ "loss": 1.6857,
+ "step": 383
+ },
+ {
+ "epoch": 0.38410803038354535,
+ "grad_norm": 0.6625111103057861,
+ "learning_rate": 0.00019200742345909915,
+ "loss": 1.7033,
+ "step": 384
+ },
+ {
+ "epoch": 0.3851083117126692,
+ "grad_norm": 0.7273709177970886,
+ "learning_rate": 0.00019196630851118398,
+ "loss": 1.665,
+ "step": 385
+ },
+ {
+ "epoch": 0.386108593041793,
+ "grad_norm": 0.6861465573310852,
+ "learning_rate": 0.0001919250925089278,
+ "loss": 1.5028,
+ "step": 386
+ },
+ {
+ "epoch": 0.38710887437091684,
+ "grad_norm": 0.845456063747406,
+ "learning_rate": 0.00019188377549761963,
+ "loss": 1.967,
+ "step": 387
+ },
+ {
+ "epoch": 0.3881091557000406,
+ "grad_norm": 0.6481165289878845,
+ "learning_rate": 0.00019184235752265928,
+ "loss": 1.6053,
+ "step": 388
+ },
+ {
+ "epoch": 0.38910943702916445,
+ "grad_norm": 0.6312947273254395,
+ "learning_rate": 0.00019180083862955772,
+ "loss": 1.4427,
+ "step": 389
+ },
+ {
+ "epoch": 0.3901097183582883,
+ "grad_norm": 0.7874154448509216,
+ "learning_rate": 0.00019175921886393666,
+ "loss": 1.6099,
+ "step": 390
+ },
+ {
+ "epoch": 0.39110999968741206,
+ "grad_norm": 0.6839481592178345,
+ "learning_rate": 0.00019171749827152869,
+ "loss": 1.7004,
+ "step": 391
+ },
+ {
+ "epoch": 0.3921102810165359,
+ "grad_norm": 0.7239277362823486,
+ "learning_rate": 0.0001916756768981772,
+ "loss": 1.8813,
+ "step": 392
+ },
+ {
+ "epoch": 0.3931105623456597,
+ "grad_norm": 0.8241100311279297,
+ "learning_rate": 0.00019163375478983632,
+ "loss": 1.9443,
+ "step": 393
+ },
+ {
+ "epoch": 0.39411084367478355,
+ "grad_norm": 0.7401999235153198,
+ "learning_rate": 0.00019159173199257085,
+ "loss": 1.6663,
+ "step": 394
+ },
+ {
+ "epoch": 0.3951111250039073,
+ "grad_norm": 0.8297036290168762,
+ "learning_rate": 0.00019154960855255628,
+ "loss": 1.8012,
+ "step": 395
+ },
+ {
+ "epoch": 0.39611140633303116,
+ "grad_norm": 0.9661216735839844,
+ "learning_rate": 0.0001915073845160786,
+ "loss": 1.7007,
+ "step": 396
+ },
+ {
+ "epoch": 0.397111687662155,
+ "grad_norm": 1.4041926860809326,
+ "learning_rate": 0.00019146505992953446,
+ "loss": 1.7232,
+ "step": 397
+ },
+ {
+ "epoch": 0.3981119689912788,
+ "grad_norm": 0.8469036221504211,
+ "learning_rate": 0.00019142263483943085,
+ "loss": 1.4479,
+ "step": 398
+ },
+ {
+ "epoch": 0.3991122503204026,
+ "grad_norm": 0.9476561546325684,
+ "learning_rate": 0.00019138010929238534,
+ "loss": 1.8572,
+ "step": 399
+ },
+ {
+ "epoch": 0.4001125316495264,
+ "grad_norm": 0.7196705937385559,
+ "learning_rate": 0.00019133748333512575,
+ "loss": 1.6184,
+ "step": 400
+ },
+ {
+ "epoch": 0.40111281297865026,
+ "grad_norm": 0.8957480192184448,
+ "learning_rate": 0.00019129475701449035,
+ "loss": 1.762,
+ "step": 401
+ },
+ {
+ "epoch": 0.4021130943077741,
+ "grad_norm": 0.7850635647773743,
+ "learning_rate": 0.0001912519303774276,
+ "loss": 1.6764,
+ "step": 402
+ },
+ {
+ "epoch": 0.40311337563689786,
+ "grad_norm": 0.7579814791679382,
+ "learning_rate": 0.0001912090034709963,
+ "loss": 1.6231,
+ "step": 403
+ },
+ {
+ "epoch": 0.4041136569660217,
+ "grad_norm": 0.7173107266426086,
+ "learning_rate": 0.00019116597634236525,
+ "loss": 1.7107,
+ "step": 404
+ },
+ {
+ "epoch": 0.4051139382951455,
+ "grad_norm": 0.7832950353622437,
+ "learning_rate": 0.0001911228490388136,
+ "loss": 1.8608,
+ "step": 405
+ },
+ {
+ "epoch": 0.4061142196242693,
+ "grad_norm": 0.716299295425415,
+ "learning_rate": 0.00019107962160773035,
+ "loss": 1.652,
+ "step": 406
+ },
+ {
+ "epoch": 0.40711450095339313,
+ "grad_norm": 0.6675253510475159,
+ "learning_rate": 0.0001910362940966147,
+ "loss": 1.5963,
+ "step": 407
+ },
+ {
+ "epoch": 0.40811478228251696,
+ "grad_norm": 0.6555336713790894,
+ "learning_rate": 0.00019099286655307568,
+ "loss": 1.4991,
+ "step": 408
+ },
+ {
+ "epoch": 0.4091150636116408,
+ "grad_norm": 0.7307867407798767,
+ "learning_rate": 0.0001909493390248324,
+ "loss": 1.8221,
+ "step": 409
+ },
+ {
+ "epoch": 0.41011534494076457,
+ "grad_norm": 0.6557430624961853,
+ "learning_rate": 0.00019090571155971366,
+ "loss": 1.6484,
+ "step": 410
+ },
+ {
+ "epoch": 0.4111156262698884,
+ "grad_norm": 0.6816605925559998,
+ "learning_rate": 0.00019086198420565823,
+ "loss": 1.5052,
+ "step": 411
+ },
+ {
+ "epoch": 0.41211590759901223,
+ "grad_norm": 0.66513592004776,
+ "learning_rate": 0.00019081815701071445,
+ "loss": 1.8818,
+ "step": 412
+ },
+ {
+ "epoch": 0.41311618892813606,
+ "grad_norm": 0.6807469129562378,
+ "learning_rate": 0.0001907742300230406,
+ "loss": 1.5997,
+ "step": 413
+ },
+ {
+ "epoch": 0.41411647025725984,
+ "grad_norm": 0.8060654401779175,
+ "learning_rate": 0.00019073020329090444,
+ "loss": 1.8099,
+ "step": 414
+ },
+ {
+ "epoch": 0.41511675158638367,
+ "grad_norm": 0.7440110445022583,
+ "learning_rate": 0.0001906860768626834,
+ "loss": 1.4876,
+ "step": 415
+ },
+ {
+ "epoch": 0.4161170329155075,
+ "grad_norm": 0.7675415277481079,
+ "learning_rate": 0.00019064185078686443,
+ "loss": 1.4722,
+ "step": 416
+ },
+ {
+ "epoch": 0.4171173142446313,
+ "grad_norm": 0.6656553149223328,
+ "learning_rate": 0.000190597525112044,
+ "loss": 1.6453,
+ "step": 417
+ },
+ {
+ "epoch": 0.4181175955737551,
+ "grad_norm": 0.730689287185669,
+ "learning_rate": 0.000190553099886928,
+ "loss": 1.6584,
+ "step": 418
+ },
+ {
+ "epoch": 0.41911787690287894,
+ "grad_norm": 0.8425858616828918,
+ "learning_rate": 0.00019050857516033173,
+ "loss": 1.6249,
+ "step": 419
+ },
+ {
+ "epoch": 0.42011815823200277,
+ "grad_norm": 0.7816892266273499,
+ "learning_rate": 0.00019046395098117983,
+ "loss": 1.7532,
+ "step": 420
+ },
+ {
+ "epoch": 0.42111843956112655,
+ "grad_norm": 0.7324026823043823,
+ "learning_rate": 0.00019041922739850616,
+ "loss": 1.8523,
+ "step": 421
+ },
+ {
+ "epoch": 0.4221187208902504,
+ "grad_norm": 0.7473389506340027,
+ "learning_rate": 0.00019037440446145385,
+ "loss": 1.583,
+ "step": 422
+ },
+ {
+ "epoch": 0.4231190022193742,
+ "grad_norm": 0.8720895051956177,
+ "learning_rate": 0.00019032948221927524,
+ "loss": 1.6806,
+ "step": 423
+ },
+ {
+ "epoch": 0.42411928354849804,
+ "grad_norm": 0.728528618812561,
+ "learning_rate": 0.00019028446072133175,
+ "loss": 1.7283,
+ "step": 424
+ },
+ {
+ "epoch": 0.4251195648776218,
+ "grad_norm": 0.739930272102356,
+ "learning_rate": 0.00019023934001709383,
+ "loss": 1.7244,
+ "step": 425
+ },
+ {
+ "epoch": 0.42611984620674564,
+ "grad_norm": 0.7825399041175842,
+ "learning_rate": 0.00019019412015614098,
+ "loss": 1.7871,
+ "step": 426
+ },
+ {
+ "epoch": 0.4271201275358695,
+ "grad_norm": 0.8878734707832336,
+ "learning_rate": 0.00019014880118816164,
+ "loss": 1.6018,
+ "step": 427
+ },
+ {
+ "epoch": 0.4281204088649933,
+ "grad_norm": 0.726259708404541,
+ "learning_rate": 0.0001901033831629532,
+ "loss": 1.7732,
+ "step": 428
+ },
+ {
+ "epoch": 0.4291206901941171,
+ "grad_norm": 0.7620319724082947,
+ "learning_rate": 0.00019005786613042185,
+ "loss": 1.6466,
+ "step": 429
+ },
+ {
+ "epoch": 0.4301209715232409,
+ "grad_norm": 0.7295501828193665,
+ "learning_rate": 0.00019001225014058255,
+ "loss": 1.8708,
+ "step": 430
+ },
+ {
+ "epoch": 0.43112125285236474,
+ "grad_norm": 0.7419458031654358,
+ "learning_rate": 0.00018996653524355902,
+ "loss": 1.6583,
+ "step": 431
+ },
+ {
+ "epoch": 0.4321215341814885,
+ "grad_norm": 0.7701705098152161,
+ "learning_rate": 0.00018992072148958368,
+ "loss": 1.4421,
+ "step": 432
+ },
+ {
+ "epoch": 0.43312181551061235,
+ "grad_norm": 0.8237659931182861,
+ "learning_rate": 0.00018987480892899758,
+ "loss": 1.844,
+ "step": 433
+ },
+ {
+ "epoch": 0.4341220968397362,
+ "grad_norm": 0.6167672276496887,
+ "learning_rate": 0.00018982879761225027,
+ "loss": 1.6193,
+ "step": 434
+ },
+ {
+ "epoch": 0.43512237816886,
+ "grad_norm": 0.7565534710884094,
+ "learning_rate": 0.00018978268758989991,
+ "loss": 1.7655,
+ "step": 435
+ },
+ {
+ "epoch": 0.4361226594979838,
+ "grad_norm": 0.8333333730697632,
+ "learning_rate": 0.00018973647891261307,
+ "loss": 1.5764,
+ "step": 436
+ },
+ {
+ "epoch": 0.4371229408271076,
+ "grad_norm": 0.7404434084892273,
+ "learning_rate": 0.00018969017163116472,
+ "loss": 1.7922,
+ "step": 437
+ },
+ {
+ "epoch": 0.43812322215623145,
+ "grad_norm": 0.7129400372505188,
+ "learning_rate": 0.0001896437657964382,
+ "loss": 1.6925,
+ "step": 438
+ },
+ {
+ "epoch": 0.4391235034853553,
+ "grad_norm": 0.7750307321548462,
+ "learning_rate": 0.00018959726145942508,
+ "loss": 1.8133,
+ "step": 439
+ },
+ {
+ "epoch": 0.44012378481447906,
+ "grad_norm": 0.7244920134544373,
+ "learning_rate": 0.00018955065867122528,
+ "loss": 1.6425,
+ "step": 440
+ },
+ {
+ "epoch": 0.4411240661436029,
+ "grad_norm": 0.7624644637107849,
+ "learning_rate": 0.00018950395748304678,
+ "loss": 1.886,
+ "step": 441
+ },
+ {
+ "epoch": 0.4421243474727267,
+ "grad_norm": 0.7016286849975586,
+ "learning_rate": 0.0001894571579462058,
+ "loss": 1.7308,
+ "step": 442
+ },
+ {
+ "epoch": 0.4431246288018505,
+ "grad_norm": 0.6965353488922119,
+ "learning_rate": 0.00018941026011212654,
+ "loss": 1.5872,
+ "step": 443
+ },
+ {
+ "epoch": 0.4441249101309743,
+ "grad_norm": 0.7479360699653625,
+ "learning_rate": 0.00018936326403234125,
+ "loss": 1.8129,
+ "step": 444
+ },
+ {
+ "epoch": 0.44512519146009816,
+ "grad_norm": 0.7027686834335327,
+ "learning_rate": 0.00018931616975849006,
+ "loss": 1.6433,
+ "step": 445
+ },
+ {
+ "epoch": 0.446125472789222,
+ "grad_norm": 0.7771592140197754,
+ "learning_rate": 0.00018926897734232115,
+ "loss": 1.4645,
+ "step": 446
+ },
+ {
+ "epoch": 0.44712575411834576,
+ "grad_norm": 0.7766458988189697,
+ "learning_rate": 0.0001892216868356904,
+ "loss": 1.7873,
+ "step": 447
+ },
+ {
+ "epoch": 0.4481260354474696,
+ "grad_norm": 0.8146182298660278,
+ "learning_rate": 0.0001891742982905615,
+ "loss": 1.7935,
+ "step": 448
+ },
+ {
+ "epoch": 0.4491263167765934,
+ "grad_norm": 0.6744781136512756,
+ "learning_rate": 0.00018912681175900598,
+ "loss": 1.8916,
+ "step": 449
+ },
+ {
+ "epoch": 0.45012659810571726,
+ "grad_norm": 0.6259024143218994,
+ "learning_rate": 0.00018907922729320285,
+ "loss": 1.6616,
+ "step": 450
+ },
+ {
+ "epoch": 0.45112687943484103,
+ "grad_norm": 0.7717494368553162,
+ "learning_rate": 0.00018903154494543889,
+ "loss": 1.817,
+ "step": 451
+ },
+ {
+ "epoch": 0.45212716076396486,
+ "grad_norm": 0.648040771484375,
+ "learning_rate": 0.00018898376476810834,
+ "loss": 1.6309,
+ "step": 452
+ },
+ {
+ "epoch": 0.4531274420930887,
+ "grad_norm": 0.7560017704963684,
+ "learning_rate": 0.00018893588681371303,
+ "loss": 1.8016,
+ "step": 453
+ },
+ {
+ "epoch": 0.45412772342221247,
+ "grad_norm": 0.8778628706932068,
+ "learning_rate": 0.00018888791113486213,
+ "loss": 1.7797,
+ "step": 454
+ },
+ {
+ "epoch": 0.4551280047513363,
+ "grad_norm": 0.6795655488967896,
+ "learning_rate": 0.00018883983778427227,
+ "loss": 1.6343,
+ "step": 455
+ },
+ {
+ "epoch": 0.45612828608046013,
+ "grad_norm": 0.6690213084220886,
+ "learning_rate": 0.0001887916668147673,
+ "loss": 1.6224,
+ "step": 456
+ },
+ {
+ "epoch": 0.45712856740958396,
+ "grad_norm": 0.7529327869415283,
+ "learning_rate": 0.00018874339827927846,
+ "loss": 1.6396,
+ "step": 457
+ },
+ {
+ "epoch": 0.45812884873870774,
+ "grad_norm": 0.8393098711967468,
+ "learning_rate": 0.00018869503223084414,
+ "loss": 1.8374,
+ "step": 458
+ },
+ {
+ "epoch": 0.45912913006783157,
+ "grad_norm": 0.7435776591300964,
+ "learning_rate": 0.00018864656872260985,
+ "loss": 1.7363,
+ "step": 459
+ },
+ {
+ "epoch": 0.4601294113969554,
+ "grad_norm": 0.6737055778503418,
+ "learning_rate": 0.00018859800780782828,
+ "loss": 1.6661,
+ "step": 460
+ },
+ {
+ "epoch": 0.46112969272607923,
+ "grad_norm": 0.7674340605735779,
+ "learning_rate": 0.000188549349539859,
+ "loss": 1.6269,
+ "step": 461
+ },
+ {
+ "epoch": 0.462129974055203,
+ "grad_norm": 0.7329950928688049,
+ "learning_rate": 0.00018850059397216876,
+ "loss": 1.6989,
+ "step": 462
+ },
+ {
+ "epoch": 0.46313025538432684,
+ "grad_norm": 0.7075778841972351,
+ "learning_rate": 0.00018845174115833099,
+ "loss": 1.7286,
+ "step": 463
+ },
+ {
+ "epoch": 0.46413053671345067,
+ "grad_norm": 0.7973611354827881,
+ "learning_rate": 0.0001884027911520262,
+ "loss": 1.7478,
+ "step": 464
+ },
+ {
+ "epoch": 0.4651308180425745,
+ "grad_norm": 0.7790057063102722,
+ "learning_rate": 0.00018835374400704154,
+ "loss": 1.6659,
+ "step": 465
+ },
+ {
+ "epoch": 0.4661310993716983,
+ "grad_norm": 0.8505310416221619,
+ "learning_rate": 0.00018830459977727096,
+ "loss": 1.6879,
+ "step": 466
+ },
+ {
+ "epoch": 0.4671313807008221,
+ "grad_norm": 0.7616267800331116,
+ "learning_rate": 0.0001882553585167151,
+ "loss": 1.6525,
+ "step": 467
+ },
+ {
+ "epoch": 0.46813166202994594,
+ "grad_norm": 0.8038133978843689,
+ "learning_rate": 0.00018820602027948114,
+ "loss": 1.7929,
+ "step": 468
+ },
+ {
+ "epoch": 0.4691319433590697,
+ "grad_norm": 0.6762365698814392,
+ "learning_rate": 0.00018815658511978298,
+ "loss": 1.6926,
+ "step": 469
+ },
+ {
+ "epoch": 0.47013222468819355,
+ "grad_norm": 0.6515015959739685,
+ "learning_rate": 0.00018810705309194083,
+ "loss": 1.7817,
+ "step": 470
+ },
+ {
+ "epoch": 0.4711325060173174,
+ "grad_norm": 0.696675181388855,
+ "learning_rate": 0.00018805742425038145,
+ "loss": 1.7195,
+ "step": 471
+ },
+ {
+ "epoch": 0.4721327873464412,
+ "grad_norm": 0.7929533123970032,
+ "learning_rate": 0.00018800769864963802,
+ "loss": 2.0165,
+ "step": 472
+ },
+ {
+ "epoch": 0.473133068675565,
+ "grad_norm": 0.7223886251449585,
+ "learning_rate": 0.00018795787634434994,
+ "loss": 1.6708,
+ "step": 473
+ },
+ {
+ "epoch": 0.4741333500046888,
+ "grad_norm": 0.7982028126716614,
+ "learning_rate": 0.0001879079573892629,
+ "loss": 1.628,
+ "step": 474
+ },
+ {
+ "epoch": 0.47513363133381264,
+ "grad_norm": 0.6962152123451233,
+ "learning_rate": 0.00018785794183922883,
+ "loss": 1.6429,
+ "step": 475
+ },
+ {
+ "epoch": 0.4761339126629365,
+ "grad_norm": 0.687489926815033,
+ "learning_rate": 0.00018780782974920572,
+ "loss": 1.4546,
+ "step": 476
+ },
+ {
+ "epoch": 0.47713419399206025,
+ "grad_norm": 0.7260375022888184,
+ "learning_rate": 0.00018775762117425777,
+ "loss": 1.739,
+ "step": 477
+ },
+ {
+ "epoch": 0.4781344753211841,
+ "grad_norm": 0.759400486946106,
+ "learning_rate": 0.0001877073161695551,
+ "loss": 1.6465,
+ "step": 478
+ },
+ {
+ "epoch": 0.4791347566503079,
+ "grad_norm": 0.7412364482879639,
+ "learning_rate": 0.00018765691479037376,
+ "loss": 1.7333,
+ "step": 479
+ },
+ {
+ "epoch": 0.4801350379794317,
+ "grad_norm": 0.6909674406051636,
+ "learning_rate": 0.00018760641709209583,
+ "loss": 1.6936,
+ "step": 480
+ },
+ {
+ "epoch": 0.4811353193085555,
+ "grad_norm": 0.6478050947189331,
+ "learning_rate": 0.0001875558231302091,
+ "loss": 1.4435,
+ "step": 481
+ },
+ {
+ "epoch": 0.48213560063767935,
+ "grad_norm": 0.6662638187408447,
+ "learning_rate": 0.00018750513296030718,
+ "loss": 1.5567,
+ "step": 482
+ },
+ {
+ "epoch": 0.4831358819668032,
+ "grad_norm": 0.6973413825035095,
+ "learning_rate": 0.00018745434663808942,
+ "loss": 1.6434,
+ "step": 483
+ },
+ {
+ "epoch": 0.48413616329592696,
+ "grad_norm": 0.8381956815719604,
+ "learning_rate": 0.0001874034642193608,
+ "loss": 1.8568,
+ "step": 484
+ },
+ {
+ "epoch": 0.4851364446250508,
+ "grad_norm": 0.6522262096405029,
+ "learning_rate": 0.0001873524857600319,
+ "loss": 1.4265,
+ "step": 485
+ },
+ {
+ "epoch": 0.4861367259541746,
+ "grad_norm": 0.7018440961837769,
+ "learning_rate": 0.00018730141131611882,
+ "loss": 1.6914,
+ "step": 486
+ },
+ {
+ "epoch": 0.48713700728329845,
+ "grad_norm": 0.8237236142158508,
+ "learning_rate": 0.00018725024094374315,
+ "loss": 1.4462,
+ "step": 487
+ },
+ {
+ "epoch": 0.4881372886124222,
+ "grad_norm": 0.6507928967475891,
+ "learning_rate": 0.00018719897469913184,
+ "loss": 1.5802,
+ "step": 488
+ },
+ {
+ "epoch": 0.48913756994154606,
+ "grad_norm": 0.8120100498199463,
+ "learning_rate": 0.00018714761263861728,
+ "loss": 1.7819,
+ "step": 489
+ },
+ {
+ "epoch": 0.4901378512706699,
+ "grad_norm": 0.759722888469696,
+ "learning_rate": 0.000187096154818637,
+ "loss": 1.7481,
+ "step": 490
+ },
+ {
+ "epoch": 0.4911381325997937,
+ "grad_norm": 0.7146822214126587,
+ "learning_rate": 0.00018704460129573391,
+ "loss": 1.6217,
+ "step": 491
+ },
+ {
+ "epoch": 0.4921384139289175,
+ "grad_norm": 0.7138429284095764,
+ "learning_rate": 0.00018699295212655596,
+ "loss": 1.7242,
+ "step": 492
+ },
+ {
+ "epoch": 0.4931386952580413,
+ "grad_norm": 0.8145613670349121,
+ "learning_rate": 0.00018694120736785632,
+ "loss": 1.8759,
+ "step": 493
+ },
+ {
+ "epoch": 0.49413897658716516,
+ "grad_norm": 0.6624435186386108,
+ "learning_rate": 0.00018688936707649304,
+ "loss": 1.8632,
+ "step": 494
+ },
+ {
+ "epoch": 0.49513925791628893,
+ "grad_norm": 0.6550843119621277,
+ "learning_rate": 0.00018683743130942928,
+ "loss": 1.7645,
+ "step": 495
+ },
+ {
+ "epoch": 0.49613953924541276,
+ "grad_norm": 0.6931488513946533,
+ "learning_rate": 0.00018678540012373302,
+ "loss": 1.5899,
+ "step": 496
+ },
+ {
+ "epoch": 0.4971398205745366,
+ "grad_norm": 0.684229850769043,
+ "learning_rate": 0.00018673327357657715,
+ "loss": 1.6902,
+ "step": 497
+ },
+ {
+ "epoch": 0.4981401019036604,
+ "grad_norm": 0.7380666136741638,
+ "learning_rate": 0.0001866810517252393,
+ "loss": 1.7115,
+ "step": 498
+ },
+ {
+ "epoch": 0.4991403832327842,
+ "grad_norm": 0.6782827973365784,
+ "learning_rate": 0.00018662873462710184,
+ "loss": 1.495,
+ "step": 499
+ },
+ {
+ "epoch": 0.5001406645619081,
+ "grad_norm": 0.730248749256134,
+ "learning_rate": 0.0001865763223396518,
+ "loss": 1.5147,
+ "step": 500
+ },
+ {
+ "epoch": 0.5011409458910319,
+ "grad_norm": 0.7644149661064148,
+ "learning_rate": 0.00018652381492048083,
+ "loss": 1.7278,
+ "step": 501
+ },
+ {
+ "epoch": 0.5021412272201556,
+ "grad_norm": 0.6977668404579163,
+ "learning_rate": 0.00018647121242728506,
+ "loss": 1.5194,
+ "step": 502
+ },
+ {
+ "epoch": 0.5031415085492795,
+ "grad_norm": 0.7714502215385437,
+ "learning_rate": 0.00018641851491786512,
+ "loss": 2.0039,
+ "step": 503
+ },
+ {
+ "epoch": 0.5041417898784033,
+ "grad_norm": 0.9013757109642029,
+ "learning_rate": 0.00018636572245012606,
+ "loss": 1.8069,
+ "step": 504
+ },
+ {
+ "epoch": 0.5051420712075271,
+ "grad_norm": 0.7173192501068115,
+ "learning_rate": 0.00018631283508207725,
+ "loss": 1.841,
+ "step": 505
+ },
+ {
+ "epoch": 0.506142352536651,
+ "grad_norm": 0.7639481425285339,
+ "learning_rate": 0.00018625985287183233,
+ "loss": 1.5333,
+ "step": 506
+ },
+ {
+ "epoch": 0.5071426338657747,
+ "grad_norm": 0.8779808282852173,
+ "learning_rate": 0.00018620677587760916,
+ "loss": 1.8304,
+ "step": 507
+ },
+ {
+ "epoch": 0.5081429151948985,
+ "grad_norm": 0.8956230282783508,
+ "learning_rate": 0.00018615360415772978,
+ "loss": 1.7228,
+ "step": 508
+ },
+ {
+ "epoch": 0.5091431965240224,
+ "grad_norm": 0.8217945098876953,
+ "learning_rate": 0.00018610033777062025,
+ "loss": 1.4977,
+ "step": 509
+ },
+ {
+ "epoch": 0.5101434778531462,
+ "grad_norm": 0.7628902196884155,
+ "learning_rate": 0.0001860469767748108,
+ "loss": 1.7634,
+ "step": 510
+ },
+ {
+ "epoch": 0.5111437591822701,
+ "grad_norm": 0.635911226272583,
+ "learning_rate": 0.00018599352122893539,
+ "loss": 1.5103,
+ "step": 511
+ },
+ {
+ "epoch": 0.5121440405113938,
+ "grad_norm": 0.6439646482467651,
+ "learning_rate": 0.00018593997119173205,
+ "loss": 1.7281,
+ "step": 512
+ },
+ {
+ "epoch": 0.5131443218405176,
+ "grad_norm": 0.7753567099571228,
+ "learning_rate": 0.00018588632672204264,
+ "loss": 1.9028,
+ "step": 513
+ },
+ {
+ "epoch": 0.5141446031696415,
+ "grad_norm": 0.8296717405319214,
+ "learning_rate": 0.0001858325878788126,
+ "loss": 1.9049,
+ "step": 514
+ },
+ {
+ "epoch": 0.5151448844987653,
+ "grad_norm": 0.7379167079925537,
+ "learning_rate": 0.00018577875472109134,
+ "loss": 1.6262,
+ "step": 515
+ },
+ {
+ "epoch": 0.516145165827889,
+ "grad_norm": 0.634040355682373,
+ "learning_rate": 0.0001857248273080317,
+ "loss": 1.5416,
+ "step": 516
+ },
+ {
+ "epoch": 0.5171454471570129,
+ "grad_norm": 0.7394217252731323,
+ "learning_rate": 0.00018567080569889015,
+ "loss": 1.6035,
+ "step": 517
+ },
+ {
+ "epoch": 0.5181457284861367,
+ "grad_norm": 0.705426037311554,
+ "learning_rate": 0.00018561668995302667,
+ "loss": 1.616,
+ "step": 518
+ },
+ {
+ "epoch": 0.5191460098152605,
+ "grad_norm": 0.778021514415741,
+ "learning_rate": 0.00018556248012990468,
+ "loss": 1.5206,
+ "step": 519
+ },
+ {
+ "epoch": 0.5201462911443844,
+ "grad_norm": 0.7203211188316345,
+ "learning_rate": 0.000185508176289091,
+ "loss": 1.5369,
+ "step": 520
+ },
+ {
+ "epoch": 0.5211465724735082,
+ "grad_norm": 0.7390999794006348,
+ "learning_rate": 0.00018545377849025566,
+ "loss": 1.6438,
+ "step": 521
+ },
+ {
+ "epoch": 0.522146853802632,
+ "grad_norm": 0.6779179573059082,
+ "learning_rate": 0.0001853992867931721,
+ "loss": 1.6268,
+ "step": 522
+ },
+ {
+ "epoch": 0.5231471351317558,
+ "grad_norm": 0.6589105725288391,
+ "learning_rate": 0.00018534470125771674,
+ "loss": 1.8353,
+ "step": 523
+ },
+ {
+ "epoch": 0.5241474164608796,
+ "grad_norm": 0.692081868648529,
+ "learning_rate": 0.0001852900219438693,
+ "loss": 1.7047,
+ "step": 524
+ },
+ {
+ "epoch": 0.5251476977900035,
+ "grad_norm": 0.6639224886894226,
+ "learning_rate": 0.0001852352489117124,
+ "loss": 1.7448,
+ "step": 525
+ },
+ {
+ "epoch": 0.5261479791191273,
+ "grad_norm": 0.7168188095092773,
+ "learning_rate": 0.00018518038222143174,
+ "loss": 1.6734,
+ "step": 526
+ },
+ {
+ "epoch": 0.527148260448251,
+ "grad_norm": 0.7671873569488525,
+ "learning_rate": 0.00018512542193331583,
+ "loss": 1.9392,
+ "step": 527
+ },
+ {
+ "epoch": 0.5281485417773749,
+ "grad_norm": 0.7861583828926086,
+ "learning_rate": 0.00018507036810775615,
+ "loss": 1.5749,
+ "step": 528
+ },
+ {
+ "epoch": 0.5291488231064987,
+ "grad_norm": 0.6727952361106873,
+ "learning_rate": 0.00018501522080524688,
+ "loss": 1.7584,
+ "step": 529
+ },
+ {
+ "epoch": 0.5301491044356225,
+ "grad_norm": 0.7287748456001282,
+ "learning_rate": 0.0001849599800863849,
+ "loss": 1.783,
+ "step": 530
+ },
+ {
+ "epoch": 0.5311493857647464,
+ "grad_norm": 0.6883361339569092,
+ "learning_rate": 0.0001849046460118698,
+ "loss": 1.6104,
+ "step": 531
+ },
+ {
+ "epoch": 0.5321496670938701,
+ "grad_norm": 0.6767789125442505,
+ "learning_rate": 0.0001848492186425037,
+ "loss": 1.8416,
+ "step": 532
+ },
+ {
+ "epoch": 0.533149948422994,
+ "grad_norm": 0.7468088269233704,
+ "learning_rate": 0.0001847936980391913,
+ "loss": 1.8297,
+ "step": 533
+ },
+ {
+ "epoch": 0.5341502297521178,
+ "grad_norm": 0.7560007572174072,
+ "learning_rate": 0.00018473808426293964,
+ "loss": 1.5405,
+ "step": 534
+ },
+ {
+ "epoch": 0.5351505110812416,
+ "grad_norm": 0.6900463104248047,
+ "learning_rate": 0.00018468237737485823,
+ "loss": 1.5117,
+ "step": 535
+ },
+ {
+ "epoch": 0.5361507924103655,
+ "grad_norm": 0.8691229820251465,
+ "learning_rate": 0.00018462657743615888,
+ "loss": 1.724,
+ "step": 536
+ },
+ {
+ "epoch": 0.5371510737394892,
+ "grad_norm": 0.7081372141838074,
+ "learning_rate": 0.00018457068450815562,
+ "loss": 1.6418,
+ "step": 537
+ },
+ {
+ "epoch": 0.538151355068613,
+ "grad_norm": 0.8148525953292847,
+ "learning_rate": 0.00018451469865226464,
+ "loss": 1.8564,
+ "step": 538
+ },
+ {
+ "epoch": 0.5391516363977369,
+ "grad_norm": 0.7306076288223267,
+ "learning_rate": 0.00018445861993000436,
+ "loss": 1.4974,
+ "step": 539
+ },
+ {
+ "epoch": 0.5401519177268607,
+ "grad_norm": 0.815825343132019,
+ "learning_rate": 0.00018440244840299506,
+ "loss": 1.8965,
+ "step": 540
+ },
+ {
+ "epoch": 0.5411521990559846,
+ "grad_norm": 0.753034234046936,
+ "learning_rate": 0.0001843461841329591,
+ "loss": 2.016,
+ "step": 541
+ },
+ {
+ "epoch": 0.5421524803851083,
+ "grad_norm": 0.8658022284507751,
+ "learning_rate": 0.0001842898271817208,
+ "loss": 1.6697,
+ "step": 542
+ },
+ {
+ "epoch": 0.5431527617142321,
+ "grad_norm": 0.7143135666847229,
+ "learning_rate": 0.00018423337761120618,
+ "loss": 1.8741,
+ "step": 543
+ },
+ {
+ "epoch": 0.544153043043356,
+ "grad_norm": 0.6843370795249939,
+ "learning_rate": 0.00018417683548344318,
+ "loss": 1.763,
+ "step": 544
+ },
+ {
+ "epoch": 0.5451533243724798,
+ "grad_norm": 0.6699584126472473,
+ "learning_rate": 0.00018412020086056133,
+ "loss": 1.7126,
+ "step": 545
+ },
+ {
+ "epoch": 0.5461536057016035,
+ "grad_norm": 0.6921600699424744,
+ "learning_rate": 0.0001840634738047918,
+ "loss": 1.6697,
+ "step": 546
+ },
+ {
+ "epoch": 0.5471538870307274,
+ "grad_norm": 0.822501003742218,
+ "learning_rate": 0.0001840066543784675,
+ "loss": 1.7045,
+ "step": 547
+ },
+ {
+ "epoch": 0.5481541683598512,
+ "grad_norm": 0.7563886046409607,
+ "learning_rate": 0.00018394974264402257,
+ "loss": 1.6853,
+ "step": 548
+ },
+ {
+ "epoch": 0.549154449688975,
+ "grad_norm": 0.7408218383789062,
+ "learning_rate": 0.00018389273866399275,
+ "loss": 1.6496,
+ "step": 549
+ },
+ {
+ "epoch": 0.5501547310180989,
+ "grad_norm": 0.6454717516899109,
+ "learning_rate": 0.00018383564250101512,
+ "loss": 1.5063,
+ "step": 550
+ },
+ {
+ "epoch": 0.5511550123472226,
+ "grad_norm": 0.7033074498176575,
+ "learning_rate": 0.000183778454217828,
+ "loss": 1.6432,
+ "step": 551
+ },
+ {
+ "epoch": 0.5521552936763465,
+ "grad_norm": 0.768194854259491,
+ "learning_rate": 0.0001837211738772711,
+ "loss": 2.0594,
+ "step": 552
+ },
+ {
+ "epoch": 0.5531555750054703,
+ "grad_norm": 0.7805166244506836,
+ "learning_rate": 0.000183663801542285,
+ "loss": 1.4317,
+ "step": 553
+ },
+ {
+ "epoch": 0.5541558563345941,
+ "grad_norm": 0.6603556871414185,
+ "learning_rate": 0.00018360633727591155,
+ "loss": 1.4171,
+ "step": 554
+ },
+ {
+ "epoch": 0.555156137663718,
+ "grad_norm": 0.6996607780456543,
+ "learning_rate": 0.00018354878114129367,
+ "loss": 1.6832,
+ "step": 555
+ },
+ {
+ "epoch": 0.5561564189928417,
+ "grad_norm": 0.7861623167991638,
+ "learning_rate": 0.00018349113320167504,
+ "loss": 1.8425,
+ "step": 556
+ },
+ {
+ "epoch": 0.5571567003219655,
+ "grad_norm": 0.8387210369110107,
+ "learning_rate": 0.00018343339352040042,
+ "loss": 2.1272,
+ "step": 557
+ },
+ {
+ "epoch": 0.5581569816510894,
+ "grad_norm": 0.829555094242096,
+ "learning_rate": 0.00018337556216091517,
+ "loss": 1.4835,
+ "step": 558
+ },
+ {
+ "epoch": 0.5591572629802132,
+ "grad_norm": 0.7824863791465759,
+ "learning_rate": 0.00018331763918676556,
+ "loss": 1.8893,
+ "step": 559
+ },
+ {
+ "epoch": 0.560157544309337,
+ "grad_norm": 0.707683801651001,
+ "learning_rate": 0.00018325962466159848,
+ "loss": 1.6492,
+ "step": 560
+ },
+ {
+ "epoch": 0.5611578256384608,
+ "grad_norm": 0.775600254535675,
+ "learning_rate": 0.00018320151864916135,
+ "loss": 1.6542,
+ "step": 561
+ },
+ {
+ "epoch": 0.5621581069675846,
+ "grad_norm": 0.7602002024650574,
+ "learning_rate": 0.00018314332121330225,
+ "loss": 1.9625,
+ "step": 562
+ },
+ {
+ "epoch": 0.5631583882967085,
+ "grad_norm": 0.7535431385040283,
+ "learning_rate": 0.0001830850324179695,
+ "loss": 1.6407,
+ "step": 563
+ },
+ {
+ "epoch": 0.5641586696258323,
+ "grad_norm": 1.1884644031524658,
+ "learning_rate": 0.00018302665232721208,
+ "loss": 1.6188,
+ "step": 564
+ },
+ {
+ "epoch": 0.565158950954956,
+ "grad_norm": 0.7576595544815063,
+ "learning_rate": 0.0001829681810051791,
+ "loss": 1.7739,
+ "step": 565
+ },
+ {
+ "epoch": 0.5661592322840799,
+ "grad_norm": 0.6807442307472229,
+ "learning_rate": 0.00018290961851611995,
+ "loss": 1.6244,
+ "step": 566
+ },
+ {
+ "epoch": 0.5671595136132037,
+ "grad_norm": 0.7222456932067871,
+ "learning_rate": 0.00018285096492438424,
+ "loss": 1.7809,
+ "step": 567
+ },
+ {
+ "epoch": 0.5681597949423275,
+ "grad_norm": 0.7002213597297668,
+ "learning_rate": 0.00018279222029442163,
+ "loss": 1.5462,
+ "step": 568
+ },
+ {
+ "epoch": 0.5691600762714514,
+ "grad_norm": 0.8433569669723511,
+ "learning_rate": 0.00018273338469078186,
+ "loss": 1.5042,
+ "step": 569
+ },
+ {
+ "epoch": 0.5701603576005752,
+ "grad_norm": 0.663144588470459,
+ "learning_rate": 0.00018267445817811466,
+ "loss": 1.7133,
+ "step": 570
+ },
+ {
+ "epoch": 0.5711606389296989,
+ "grad_norm": 0.7298465967178345,
+ "learning_rate": 0.00018261544082116954,
+ "loss": 2.0201,
+ "step": 571
+ },
+ {
+ "epoch": 0.5721609202588228,
+ "grad_norm": 0.7613754868507385,
+ "learning_rate": 0.00018255633268479595,
+ "loss": 1.8065,
+ "step": 572
+ },
+ {
+ "epoch": 0.5731612015879466,
+ "grad_norm": 0.7252177596092224,
+ "learning_rate": 0.00018249713383394303,
+ "loss": 1.5715,
+ "step": 573
+ },
+ {
+ "epoch": 0.5741614829170705,
+ "grad_norm": 0.783961832523346,
+ "learning_rate": 0.0001824378443336596,
+ "loss": 1.7102,
+ "step": 574
+ },
+ {
+ "epoch": 0.5751617642461943,
+ "grad_norm": 0.8532115817070007,
+ "learning_rate": 0.00018237846424909413,
+ "loss": 1.7011,
+ "step": 575
+ },
+ {
+ "epoch": 0.576162045575318,
+ "grad_norm": 0.7841559052467346,
+ "learning_rate": 0.00018231899364549455,
+ "loss": 1.6397,
+ "step": 576
+ },
+ {
+ "epoch": 0.5771623269044419,
+ "grad_norm": 0.7118051648139954,
+ "learning_rate": 0.00018225943258820833,
+ "loss": 1.7166,
+ "step": 577
+ },
+ {
+ "epoch": 0.5781626082335657,
+ "grad_norm": 0.7298933863639832,
+ "learning_rate": 0.00018219978114268227,
+ "loss": 1.604,
+ "step": 578
+ },
+ {
+ "epoch": 0.5791628895626895,
+ "grad_norm": 0.6644678711891174,
+ "learning_rate": 0.00018214003937446253,
+ "loss": 1.7673,
+ "step": 579
+ },
+ {
+ "epoch": 0.5801631708918134,
+ "grad_norm": 0.6707085371017456,
+ "learning_rate": 0.00018208020734919455,
+ "loss": 1.662,
+ "step": 580
+ },
+ {
+ "epoch": 0.5811634522209371,
+ "grad_norm": 0.7431498765945435,
+ "learning_rate": 0.00018202028513262288,
+ "loss": 1.7757,
+ "step": 581
+ },
+ {
+ "epoch": 0.5821637335500609,
+ "grad_norm": 0.6936060190200806,
+ "learning_rate": 0.00018196027279059117,
+ "loss": 1.8464,
+ "step": 582
+ },
+ {
+ "epoch": 0.5831640148791848,
+ "grad_norm": 0.6512508988380432,
+ "learning_rate": 0.00018190017038904215,
+ "loss": 1.5537,
+ "step": 583
+ },
+ {
+ "epoch": 0.5841642962083086,
+ "grad_norm": 0.7541425228118896,
+ "learning_rate": 0.0001818399779940175,
+ "loss": 1.8552,
+ "step": 584
+ },
+ {
+ "epoch": 0.5851645775374325,
+ "grad_norm": 0.720447301864624,
+ "learning_rate": 0.0001817796956716578,
+ "loss": 1.5392,
+ "step": 585
+ },
+ {
+ "epoch": 0.5861648588665562,
+ "grad_norm": 0.7526831030845642,
+ "learning_rate": 0.00018171932348820234,
+ "loss": 1.8224,
+ "step": 586
+ },
+ {
+ "epoch": 0.58716514019568,
+ "grad_norm": 0.6906039714813232,
+ "learning_rate": 0.0001816588615099893,
+ "loss": 1.6498,
+ "step": 587
+ },
+ {
+ "epoch": 0.5881654215248039,
+ "grad_norm": 0.726737380027771,
+ "learning_rate": 0.00018159830980345548,
+ "loss": 1.5377,
+ "step": 588
+ },
+ {
+ "epoch": 0.5891657028539277,
+ "grad_norm": 0.6792006492614746,
+ "learning_rate": 0.0001815376684351362,
+ "loss": 1.8905,
+ "step": 589
+ },
+ {
+ "epoch": 0.5901659841830514,
+ "grad_norm": 0.7885284423828125,
+ "learning_rate": 0.00018147693747166534,
+ "loss": 1.709,
+ "step": 590
+ },
+ {
+ "epoch": 0.5911662655121753,
+ "grad_norm": 0.7270367741584778,
+ "learning_rate": 0.00018141611697977529,
+ "loss": 1.7973,
+ "step": 591
+ },
+ {
+ "epoch": 0.5921665468412991,
+ "grad_norm": 0.7852438688278198,
+ "learning_rate": 0.00018135520702629675,
+ "loss": 1.6312,
+ "step": 592
+ },
+ {
+ "epoch": 0.5931668281704229,
+ "grad_norm": 0.7881343960762024,
+ "learning_rate": 0.0001812942076781588,
+ "loss": 1.7581,
+ "step": 593
+ },
+ {
+ "epoch": 0.5941671094995468,
+ "grad_norm": 0.7581279277801514,
+ "learning_rate": 0.0001812331190023886,
+ "loss": 1.6811,
+ "step": 594
+ },
+ {
+ "epoch": 0.5951673908286705,
+ "grad_norm": 0.7250573039054871,
+ "learning_rate": 0.0001811719410661116,
+ "loss": 1.6835,
+ "step": 595
+ },
+ {
+ "epoch": 0.5961676721577944,
+ "grad_norm": 0.7383652329444885,
+ "learning_rate": 0.00018111067393655132,
+ "loss": 1.7804,
+ "step": 596
+ },
+ {
+ "epoch": 0.5971679534869182,
+ "grad_norm": 0.6631885170936584,
+ "learning_rate": 0.0001810493176810292,
+ "loss": 1.477,
+ "step": 597
+ },
+ {
+ "epoch": 0.598168234816042,
+ "grad_norm": 0.6705698370933533,
+ "learning_rate": 0.00018098787236696474,
+ "loss": 1.5939,
+ "step": 598
+ },
+ {
+ "epoch": 0.5991685161451659,
+ "grad_norm": 0.6646862626075745,
+ "learning_rate": 0.00018092633806187513,
+ "loss": 1.5903,
+ "step": 599
+ },
+ {
+ "epoch": 0.6001687974742896,
+ "grad_norm": 0.7267604470252991,
+ "learning_rate": 0.0001808647148333755,
+ "loss": 1.6864,
+ "step": 600
+ },
+ {
+ "epoch": 0.6011690788034134,
+ "grad_norm": 0.672102689743042,
+ "learning_rate": 0.00018080300274917862,
+ "loss": 1.73,
+ "step": 601
+ },
+ {
+ "epoch": 0.6021693601325373,
+ "grad_norm": 0.7541230320930481,
+ "learning_rate": 0.00018074120187709495,
+ "loss": 1.7824,
+ "step": 602
+ },
+ {
+ "epoch": 0.6031696414616611,
+ "grad_norm": 0.6589316129684448,
+ "learning_rate": 0.00018067931228503246,
+ "loss": 1.7007,
+ "step": 603
+ },
+ {
+ "epoch": 0.604169922790785,
+ "grad_norm": 0.7083007097244263,
+ "learning_rate": 0.00018061733404099655,
+ "loss": 1.7703,
+ "step": 604
+ },
+ {
+ "epoch": 0.6051702041199087,
+ "grad_norm": 0.6700689196586609,
+ "learning_rate": 0.00018055526721309016,
+ "loss": 1.5967,
+ "step": 605
+ },
+ {
+ "epoch": 0.6061704854490325,
+ "grad_norm": 0.6766354441642761,
+ "learning_rate": 0.0001804931118695135,
+ "loss": 1.6208,
+ "step": 606
+ },
+ {
+ "epoch": 0.6071707667781564,
+ "grad_norm": 0.8214102387428284,
+ "learning_rate": 0.00018043086807856403,
+ "loss": 1.6084,
+ "step": 607
+ },
+ {
+ "epoch": 0.6081710481072802,
+ "grad_norm": 0.736492395401001,
+ "learning_rate": 0.00018036853590863648,
+ "loss": 1.7254,
+ "step": 608
+ },
+ {
+ "epoch": 0.609171329436404,
+ "grad_norm": 0.6791033148765564,
+ "learning_rate": 0.00018030611542822257,
+ "loss": 1.5837,
+ "step": 609
+ },
+ {
+ "epoch": 0.6101716107655278,
+ "grad_norm": 0.6344060301780701,
+ "learning_rate": 0.00018024360670591114,
+ "loss": 1.5407,
+ "step": 610
+ },
+ {
+ "epoch": 0.6111718920946516,
+ "grad_norm": 0.9026575088500977,
+ "learning_rate": 0.00018018100981038798,
+ "loss": 1.6748,
+ "step": 611
+ },
+ {
+ "epoch": 0.6121721734237754,
+ "grad_norm": 0.8029866814613342,
+ "learning_rate": 0.00018011832481043576,
+ "loss": 1.7547,
+ "step": 612
+ },
+ {
+ "epoch": 0.6131724547528993,
+ "grad_norm": 0.8065117597579956,
+ "learning_rate": 0.00018005555177493394,
+ "loss": 1.8051,
+ "step": 613
+ },
+ {
+ "epoch": 0.614172736082023,
+ "grad_norm": 0.7858480215072632,
+ "learning_rate": 0.00017999269077285875,
+ "loss": 1.6728,
+ "step": 614
+ },
+ {
+ "epoch": 0.6151730174111469,
+ "grad_norm": 0.6735272407531738,
+ "learning_rate": 0.00017992974187328305,
+ "loss": 1.8585,
+ "step": 615
+ },
+ {
+ "epoch": 0.6161732987402707,
+ "grad_norm": 0.7518951892852783,
+ "learning_rate": 0.00017986670514537627,
+ "loss": 1.5429,
+ "step": 616
+ },
+ {
+ "epoch": 0.6171735800693945,
+ "grad_norm": 0.6952928900718689,
+ "learning_rate": 0.00017980358065840444,
+ "loss": 1.5982,
+ "step": 617
+ },
+ {
+ "epoch": 0.6181738613985184,
+ "grad_norm": 0.8996840119361877,
+ "learning_rate": 0.0001797403684817299,
+ "loss": 1.8164,
+ "step": 618
+ },
+ {
+ "epoch": 0.6191741427276422,
+ "grad_norm": 0.7645425200462341,
+ "learning_rate": 0.00017967706868481144,
+ "loss": 1.8373,
+ "step": 619
+ },
+ {
+ "epoch": 0.6201744240567659,
+ "grad_norm": 0.8479064106941223,
+ "learning_rate": 0.00017961368133720407,
+ "loss": 1.6483,
+ "step": 620
+ },
+ {
+ "epoch": 0.6211747053858898,
+ "grad_norm": 0.7806827425956726,
+ "learning_rate": 0.000179550206508559,
+ "loss": 1.78,
+ "step": 621
+ },
+ {
+ "epoch": 0.6221749867150136,
+ "grad_norm": 0.6476775407791138,
+ "learning_rate": 0.00017948664426862364,
+ "loss": 1.7712,
+ "step": 622
+ },
+ {
+ "epoch": 0.6231752680441374,
+ "grad_norm": 0.8421279788017273,
+ "learning_rate": 0.00017942299468724134,
+ "loss": 1.7753,
+ "step": 623
+ },
+ {
+ "epoch": 0.6241755493732613,
+ "grad_norm": 0.6706071496009827,
+ "learning_rate": 0.0001793592578343515,
+ "loss": 1.4093,
+ "step": 624
+ },
+ {
+ "epoch": 0.625175830702385,
+ "grad_norm": 0.8224231004714966,
+ "learning_rate": 0.0001792954337799894,
+ "loss": 1.7343,
+ "step": 625
+ },
+ {
+ "epoch": 0.6261761120315089,
+ "grad_norm": 0.8398690819740295,
+ "learning_rate": 0.00017923152259428612,
+ "loss": 1.8017,
+ "step": 626
+ },
+ {
+ "epoch": 0.6271763933606327,
+ "grad_norm": 0.6664738059043884,
+ "learning_rate": 0.00017916752434746856,
+ "loss": 1.6023,
+ "step": 627
+ },
+ {
+ "epoch": 0.6281766746897565,
+ "grad_norm": 0.9246477484703064,
+ "learning_rate": 0.0001791034391098591,
+ "loss": 1.7862,
+ "step": 628
+ },
+ {
+ "epoch": 0.6291769560188804,
+ "grad_norm": 0.797835111618042,
+ "learning_rate": 0.00017903926695187595,
+ "loss": 1.6059,
+ "step": 629
+ },
+ {
+ "epoch": 0.6301772373480041,
+ "grad_norm": 0.613727331161499,
+ "learning_rate": 0.0001789750079440326,
+ "loss": 1.5086,
+ "step": 630
+ },
+ {
+ "epoch": 0.6311775186771279,
+ "grad_norm": 0.7127765417098999,
+ "learning_rate": 0.00017891066215693817,
+ "loss": 1.5985,
+ "step": 631
+ },
+ {
+ "epoch": 0.6321778000062518,
+ "grad_norm": 0.6923073530197144,
+ "learning_rate": 0.00017884622966129695,
+ "loss": 1.5537,
+ "step": 632
+ },
+ {
+ "epoch": 0.6331780813353756,
+ "grad_norm": 0.7015733122825623,
+ "learning_rate": 0.00017878171052790868,
+ "loss": 1.7782,
+ "step": 633
+ },
+ {
+ "epoch": 0.6341783626644993,
+ "grad_norm": 0.6932784914970398,
+ "learning_rate": 0.00017871710482766817,
+ "loss": 1.4294,
+ "step": 634
+ },
+ {
+ "epoch": 0.6351786439936232,
+ "grad_norm": 0.7054254412651062,
+ "learning_rate": 0.00017865241263156546,
+ "loss": 1.7227,
+ "step": 635
+ },
+ {
+ "epoch": 0.636178925322747,
+ "grad_norm": 0.6994242072105408,
+ "learning_rate": 0.0001785876340106855,
+ "loss": 1.5998,
+ "step": 636
+ },
+ {
+ "epoch": 0.6371792066518709,
+ "grad_norm": 0.81461501121521,
+ "learning_rate": 0.0001785227690362083,
+ "loss": 1.7212,
+ "step": 637
+ },
+ {
+ "epoch": 0.6381794879809947,
+ "grad_norm": 0.943434476852417,
+ "learning_rate": 0.00017845781777940878,
+ "loss": 1.5926,
+ "step": 638
+ },
+ {
+ "epoch": 0.6391797693101184,
+ "grad_norm": 0.8455945253372192,
+ "learning_rate": 0.00017839278031165658,
+ "loss": 1.8511,
+ "step": 639
+ },
+ {
+ "epoch": 0.6401800506392423,
+ "grad_norm": 0.9348243474960327,
+ "learning_rate": 0.00017832765670441612,
+ "loss": 1.6293,
+ "step": 640
+ },
+ {
+ "epoch": 0.6411803319683661,
+ "grad_norm": 0.746127724647522,
+ "learning_rate": 0.0001782624470292465,
+ "loss": 1.4903,
+ "step": 641
+ },
+ {
+ "epoch": 0.6421806132974899,
+ "grad_norm": 0.6215783357620239,
+ "learning_rate": 0.0001781971513578013,
+ "loss": 1.7806,
+ "step": 642
+ },
+ {
+ "epoch": 0.6431808946266138,
+ "grad_norm": 0.7447994947433472,
+ "learning_rate": 0.00017813176976182873,
+ "loss": 1.7475,
+ "step": 643
+ },
+ {
+ "epoch": 0.6441811759557375,
+ "grad_norm": 0.6916540265083313,
+ "learning_rate": 0.00017806630231317127,
+ "loss": 1.6401,
+ "step": 644
+ },
+ {
+ "epoch": 0.6451814572848613,
+ "grad_norm": 0.7208524942398071,
+ "learning_rate": 0.00017800074908376584,
+ "loss": 1.7524,
+ "step": 645
+ },
+ {
+ "epoch": 0.6461817386139852,
+ "grad_norm": 0.7548331618309021,
+ "learning_rate": 0.00017793511014564358,
+ "loss": 1.5644,
+ "step": 646
+ },
+ {
+ "epoch": 0.647182019943109,
+ "grad_norm": 0.7919667959213257,
+ "learning_rate": 0.00017786938557092983,
+ "loss": 1.6758,
+ "step": 647
+ },
+ {
+ "epoch": 0.6481823012722329,
+ "grad_norm": 0.700618326663971,
+ "learning_rate": 0.00017780357543184397,
+ "loss": 1.5213,
+ "step": 648
+ },
+ {
+ "epoch": 0.6491825826013566,
+ "grad_norm": 0.6646535992622375,
+ "learning_rate": 0.00017773767980069945,
+ "loss": 1.6487,
+ "step": 649
+ },
+ {
+ "epoch": 0.6501828639304804,
+ "grad_norm": 0.6486669182777405,
+ "learning_rate": 0.0001776716987499037,
+ "loss": 1.6556,
+ "step": 650
+ },
+ {
+ "epoch": 0.6511831452596043,
+ "grad_norm": 0.657747745513916,
+ "learning_rate": 0.0001776056323519579,
+ "loss": 1.5943,
+ "step": 651
+ },
+ {
+ "epoch": 0.6521834265887281,
+ "grad_norm": 0.7777379751205444,
+ "learning_rate": 0.00017753948067945712,
+ "loss": 1.6069,
+ "step": 652
+ },
+ {
+ "epoch": 0.6531837079178519,
+ "grad_norm": 0.772153913974762,
+ "learning_rate": 0.00017747324380509006,
+ "loss": 1.7065,
+ "step": 653
+ },
+ {
+ "epoch": 0.6541839892469757,
+ "grad_norm": 0.6984367966651917,
+ "learning_rate": 0.00017740692180163908,
+ "loss": 1.7122,
+ "step": 654
+ },
+ {
+ "epoch": 0.6551842705760995,
+ "grad_norm": 0.8033855557441711,
+ "learning_rate": 0.00017734051474198003,
+ "loss": 1.6095,
+ "step": 655
+ },
+ {
+ "epoch": 0.6561845519052233,
+ "grad_norm": 0.7568691372871399,
+ "learning_rate": 0.0001772740226990823,
+ "loss": 1.6783,
+ "step": 656
+ },
+ {
+ "epoch": 0.6571848332343472,
+ "grad_norm": 0.7288162708282471,
+ "learning_rate": 0.00017720744574600863,
+ "loss": 1.695,
+ "step": 657
+ },
+ {
+ "epoch": 0.658185114563471,
+ "grad_norm": 0.6898120045661926,
+ "learning_rate": 0.00017714078395591502,
+ "loss": 1.6539,
+ "step": 658
+ },
+ {
+ "epoch": 0.6591853958925948,
+ "grad_norm": 0.6977367997169495,
+ "learning_rate": 0.00017707403740205071,
+ "loss": 1.4558,
+ "step": 659
+ },
+ {
+ "epoch": 0.6601856772217186,
+ "grad_norm": 0.6594682335853577,
+ "learning_rate": 0.00017700720615775812,
+ "loss": 1.56,
+ "step": 660
+ },
+ {
+ "epoch": 0.6611859585508424,
+ "grad_norm": 0.6146736741065979,
+ "learning_rate": 0.0001769402902964727,
+ "loss": 1.7014,
+ "step": 661
+ },
+ {
+ "epoch": 0.6621862398799663,
+ "grad_norm": 0.7182234525680542,
+ "learning_rate": 0.00017687328989172288,
+ "loss": 1.5655,
+ "step": 662
+ },
+ {
+ "epoch": 0.66318652120909,
+ "grad_norm": 0.6940692067146301,
+ "learning_rate": 0.00017680620501712996,
+ "loss": 1.6177,
+ "step": 663
+ },
+ {
+ "epoch": 0.6641868025382138,
+ "grad_norm": 0.7672961950302124,
+ "learning_rate": 0.00017673903574640814,
+ "loss": 1.559,
+ "step": 664
+ },
+ {
+ "epoch": 0.6651870838673377,
+ "grad_norm": 0.654500424861908,
+ "learning_rate": 0.00017667178215336423,
+ "loss": 1.5024,
+ "step": 665
+ },
+ {
+ "epoch": 0.6661873651964615,
+ "grad_norm": 0.8137261867523193,
+ "learning_rate": 0.0001766044443118978,
+ "loss": 1.7865,
+ "step": 666
+ },
+ {
+ "epoch": 0.6671876465255854,
+ "grad_norm": 0.806624710559845,
+ "learning_rate": 0.000176537022296001,
+ "loss": 1.4944,
+ "step": 667
+ },
+ {
+ "epoch": 0.6681879278547092,
+ "grad_norm": 0.7952747941017151,
+ "learning_rate": 0.00017646951617975837,
+ "loss": 1.5371,
+ "step": 668
+ },
+ {
+ "epoch": 0.6691882091838329,
+ "grad_norm": 0.6380738615989685,
+ "learning_rate": 0.00017640192603734692,
+ "loss": 1.3117,
+ "step": 669
+ },
+ {
+ "epoch": 0.6701884905129568,
+ "grad_norm": 0.6559002995491028,
+ "learning_rate": 0.00017633425194303606,
+ "loss": 1.3662,
+ "step": 670
+ },
+ {
+ "epoch": 0.6711887718420806,
+ "grad_norm": 0.715826153755188,
+ "learning_rate": 0.00017626649397118734,
+ "loss": 1.7271,
+ "step": 671
+ },
+ {
+ "epoch": 0.6721890531712044,
+ "grad_norm": 0.6719872355461121,
+ "learning_rate": 0.00017619865219625452,
+ "loss": 1.747,
+ "step": 672
+ },
+ {
+ "epoch": 0.6731893345003283,
+ "grad_norm": 0.6901715397834778,
+ "learning_rate": 0.00017613072669278343,
+ "loss": 1.6438,
+ "step": 673
+ },
+ {
+ "epoch": 0.674189615829452,
+ "grad_norm": 0.6601479649543762,
+ "learning_rate": 0.00017606271753541192,
+ "loss": 1.8191,
+ "step": 674
+ },
+ {
+ "epoch": 0.6751898971585758,
+ "grad_norm": 0.8059187531471252,
+ "learning_rate": 0.00017599462479886974,
+ "loss": 1.6946,
+ "step": 675
+ },
+ {
+ "epoch": 0.6761901784876997,
+ "grad_norm": 0.6966856718063354,
+ "learning_rate": 0.00017592644855797854,
+ "loss": 1.5551,
+ "step": 676
+ },
+ {
+ "epoch": 0.6771904598168235,
+ "grad_norm": 0.7306144833564758,
+ "learning_rate": 0.00017585818888765168,
+ "loss": 1.5429,
+ "step": 677
+ },
+ {
+ "epoch": 0.6781907411459474,
+ "grad_norm": 0.572907030582428,
+ "learning_rate": 0.0001757898458628941,
+ "loss": 1.4437,
+ "step": 678
+ },
+ {
+ "epoch": 0.6791910224750711,
+ "grad_norm": 0.6807466149330139,
+ "learning_rate": 0.00017572141955880252,
+ "loss": 1.6307,
+ "step": 679
+ },
+ {
+ "epoch": 0.6801913038041949,
+ "grad_norm": 0.7529204487800598,
+ "learning_rate": 0.00017565291005056504,
+ "loss": 1.631,
+ "step": 680
+ },
+ {
+ "epoch": 0.6811915851333188,
+ "grad_norm": 0.6292940378189087,
+ "learning_rate": 0.00017558431741346122,
+ "loss": 1.7512,
+ "step": 681
+ },
+ {
+ "epoch": 0.6821918664624426,
+ "grad_norm": 0.7981480956077576,
+ "learning_rate": 0.00017551564172286197,
+ "loss": 1.7704,
+ "step": 682
+ },
+ {
+ "epoch": 0.6831921477915663,
+ "grad_norm": 0.7816259860992432,
+ "learning_rate": 0.00017544688305422943,
+ "loss": 1.4954,
+ "step": 683
+ },
+ {
+ "epoch": 0.6841924291206902,
+ "grad_norm": 0.6866456866264343,
+ "learning_rate": 0.00017537804148311695,
+ "loss": 1.7986,
+ "step": 684
+ },
+ {
+ "epoch": 0.685192710449814,
+ "grad_norm": 0.7499064803123474,
+ "learning_rate": 0.00017530911708516902,
+ "loss": 1.6472,
+ "step": 685
+ },
+ {
+ "epoch": 0.6861929917789378,
+ "grad_norm": 0.5923457145690918,
+ "learning_rate": 0.00017524010993612098,
+ "loss": 1.4866,
+ "step": 686
+ },
+ {
+ "epoch": 0.6871932731080617,
+ "grad_norm": 0.6991822719573975,
+ "learning_rate": 0.00017517102011179933,
+ "loss": 1.605,
+ "step": 687
+ },
+ {
+ "epoch": 0.6881935544371854,
+ "grad_norm": 0.7880247235298157,
+ "learning_rate": 0.0001751018476881212,
+ "loss": 1.641,
+ "step": 688
+ },
+ {
+ "epoch": 0.6891938357663093,
+ "grad_norm": 0.7848097085952759,
+ "learning_rate": 0.00017503259274109464,
+ "loss": 1.7505,
+ "step": 689
+ },
+ {
+ "epoch": 0.6901941170954331,
+ "grad_norm": 0.693678081035614,
+ "learning_rate": 0.00017496325534681825,
+ "loss": 1.6565,
+ "step": 690
+ },
+ {
+ "epoch": 0.6911943984245569,
+ "grad_norm": 0.8232877254486084,
+ "learning_rate": 0.00017489383558148136,
+ "loss": 1.7664,
+ "step": 691
+ },
+ {
+ "epoch": 0.6921946797536808,
+ "grad_norm": 0.7834855914115906,
+ "learning_rate": 0.00017482433352136365,
+ "loss": 1.4381,
+ "step": 692
+ },
+ {
+ "epoch": 0.6931949610828045,
+ "grad_norm": 0.6186713576316833,
+ "learning_rate": 0.00017475474924283536,
+ "loss": 1.6482,
+ "step": 693
+ },
+ {
+ "epoch": 0.6941952424119283,
+ "grad_norm": 0.7511133551597595,
+ "learning_rate": 0.00017468508282235704,
+ "loss": 1.6186,
+ "step": 694
+ },
+ {
+ "epoch": 0.6951955237410522,
+ "grad_norm": 0.8017745614051819,
+ "learning_rate": 0.00017461533433647946,
+ "loss": 1.6597,
+ "step": 695
+ },
+ {
+ "epoch": 0.696195805070176,
+ "grad_norm": 0.8190794587135315,
+ "learning_rate": 0.00017454550386184362,
+ "loss": 1.6602,
+ "step": 696
+ },
+ {
+ "epoch": 0.6971960863992998,
+ "grad_norm": 0.7479042410850525,
+ "learning_rate": 0.00017447559147518055,
+ "loss": 1.77,
+ "step": 697
+ },
+ {
+ "epoch": 0.6981963677284236,
+ "grad_norm": 0.7239962816238403,
+ "learning_rate": 0.00017440559725331135,
+ "loss": 1.5838,
+ "step": 698
+ },
+ {
+ "epoch": 0.6991966490575474,
+ "grad_norm": 0.7252762317657471,
+ "learning_rate": 0.000174335521273147,
+ "loss": 1.5462,
+ "step": 699
+ },
+ {
+ "epoch": 0.7001969303866713,
+ "grad_norm": 0.9383960962295532,
+ "learning_rate": 0.00017426536361168834,
+ "loss": 1.5104,
+ "step": 700
+ },
+ {
+ "epoch": 0.7011972117157951,
+ "grad_norm": 0.6944159269332886,
+ "learning_rate": 0.00017419512434602594,
+ "loss": 1.6382,
+ "step": 701
+ },
+ {
+ "epoch": 0.7021974930449189,
+ "grad_norm": 0.6809273362159729,
+ "learning_rate": 0.00017412480355334005,
+ "loss": 1.725,
+ "step": 702
+ },
+ {
+ "epoch": 0.7031977743740427,
+ "grad_norm": 0.7521125674247742,
+ "learning_rate": 0.00017405440131090048,
+ "loss": 1.8499,
+ "step": 703
+ },
+ {
+ "epoch": 0.7041980557031665,
+ "grad_norm": 0.6854100227355957,
+ "learning_rate": 0.00017398391769606658,
+ "loss": 1.6648,
+ "step": 704
+ },
+ {
+ "epoch": 0.7051983370322903,
+ "grad_norm": 0.7382327318191528,
+ "learning_rate": 0.00017391335278628712,
+ "loss": 1.5806,
+ "step": 705
+ },
+ {
+ "epoch": 0.7061986183614142,
+ "grad_norm": 0.7387582063674927,
+ "learning_rate": 0.00017384270665910014,
+ "loss": 1.5563,
+ "step": 706
+ },
+ {
+ "epoch": 0.707198899690538,
+ "grad_norm": 0.7698972821235657,
+ "learning_rate": 0.000173771979392133,
+ "loss": 1.6626,
+ "step": 707
+ },
+ {
+ "epoch": 0.7081991810196617,
+ "grad_norm": 0.7639899849891663,
+ "learning_rate": 0.00017370117106310214,
+ "loss": 1.6725,
+ "step": 708
+ },
+ {
+ "epoch": 0.7091994623487856,
+ "grad_norm": 0.6684393286705017,
+ "learning_rate": 0.0001736302817498131,
+ "loss": 1.64,
+ "step": 709
+ },
+ {
+ "epoch": 0.7101997436779094,
+ "grad_norm": 0.6329504251480103,
+ "learning_rate": 0.00017355931153016044,
+ "loss": 1.4472,
+ "step": 710
+ },
+ {
+ "epoch": 0.7112000250070333,
+ "grad_norm": 0.8133587837219238,
+ "learning_rate": 0.0001734882604821276,
+ "loss": 1.7971,
+ "step": 711
+ },
+ {
+ "epoch": 0.712200306336157,
+ "grad_norm": 0.6524143218994141,
+ "learning_rate": 0.0001734171286837868,
+ "loss": 1.5366,
+ "step": 712
+ },
+ {
+ "epoch": 0.7132005876652808,
+ "grad_norm": 0.6714311242103577,
+ "learning_rate": 0.00017334591621329906,
+ "loss": 1.841,
+ "step": 713
+ },
+ {
+ "epoch": 0.7142008689944047,
+ "grad_norm": 0.6690782904624939,
+ "learning_rate": 0.00017327462314891402,
+ "loss": 1.623,
+ "step": 714
+ },
+ {
+ "epoch": 0.7152011503235285,
+ "grad_norm": 0.650442361831665,
+ "learning_rate": 0.00017320324956896977,
+ "loss": 1.6124,
+ "step": 715
+ },
+ {
+ "epoch": 0.7162014316526523,
+ "grad_norm": 0.7075713276863098,
+ "learning_rate": 0.00017313179555189306,
+ "loss": 1.5154,
+ "step": 716
+ },
+ {
+ "epoch": 0.7172017129817762,
+ "grad_norm": 0.729060173034668,
+ "learning_rate": 0.00017306026117619889,
+ "loss": 1.7072,
+ "step": 717
+ },
+ {
+ "epoch": 0.7182019943108999,
+ "grad_norm": 0.8547433614730835,
+ "learning_rate": 0.0001729886465204906,
+ "loss": 1.6237,
+ "step": 718
+ },
+ {
+ "epoch": 0.7192022756400237,
+ "grad_norm": 0.6729336380958557,
+ "learning_rate": 0.0001729169516634598,
+ "loss": 1.7769,
+ "step": 719
+ },
+ {
+ "epoch": 0.7202025569691476,
+ "grad_norm": 0.7437167167663574,
+ "learning_rate": 0.0001728451766838861,
+ "loss": 1.5056,
+ "step": 720
+ },
+ {
+ "epoch": 0.7212028382982714,
+ "grad_norm": 0.6573147177696228,
+ "learning_rate": 0.00017277332166063726,
+ "loss": 1.7694,
+ "step": 721
+ },
+ {
+ "epoch": 0.7222031196273953,
+ "grad_norm": 0.6767126321792603,
+ "learning_rate": 0.00017270138667266894,
+ "loss": 1.6014,
+ "step": 722
+ },
+ {
+ "epoch": 0.723203400956519,
+ "grad_norm": 0.7488179206848145,
+ "learning_rate": 0.00017262937179902472,
+ "loss": 1.573,
+ "step": 723
+ },
+ {
+ "epoch": 0.7242036822856428,
+ "grad_norm": 0.6491002440452576,
+ "learning_rate": 0.00017255727711883588,
+ "loss": 1.6705,
+ "step": 724
+ },
+ {
+ "epoch": 0.7252039636147667,
+ "grad_norm": 0.764090359210968,
+ "learning_rate": 0.00017248510271132144,
+ "loss": 1.6761,
+ "step": 725
+ },
+ {
+ "epoch": 0.7262042449438905,
+ "grad_norm": 0.7116997838020325,
+ "learning_rate": 0.00017241284865578802,
+ "loss": 1.7435,
+ "step": 726
+ },
+ {
+ "epoch": 0.7272045262730142,
+ "grad_norm": 0.6367645859718323,
+ "learning_rate": 0.00017234051503162978,
+ "loss": 1.7061,
+ "step": 727
+ },
+ {
+ "epoch": 0.7282048076021381,
+ "grad_norm": 0.7232155203819275,
+ "learning_rate": 0.0001722681019183283,
+ "loss": 1.8142,
+ "step": 728
+ },
+ {
+ "epoch": 0.7292050889312619,
+ "grad_norm": 0.7533649802207947,
+ "learning_rate": 0.00017219560939545246,
+ "loss": 1.8202,
+ "step": 729
+ },
+ {
+ "epoch": 0.7302053702603858,
+ "grad_norm": 0.6923018097877502,
+ "learning_rate": 0.00017212303754265843,
+ "loss": 1.4925,
+ "step": 730
+ },
+ {
+ "epoch": 0.7312056515895096,
+ "grad_norm": 0.7326932549476624,
+ "learning_rate": 0.0001720503864396896,
+ "loss": 1.5192,
+ "step": 731
+ },
+ {
+ "epoch": 0.7322059329186333,
+ "grad_norm": 0.7220762968063354,
+ "learning_rate": 0.00017197765616637636,
+ "loss": 1.7601,
+ "step": 732
+ },
+ {
+ "epoch": 0.7332062142477572,
+ "grad_norm": 0.605725884437561,
+ "learning_rate": 0.0001719048468026361,
+ "loss": 1.6309,
+ "step": 733
+ },
+ {
+ "epoch": 0.734206495576881,
+ "grad_norm": 0.6728388667106628,
+ "learning_rate": 0.00017183195842847322,
+ "loss": 1.5993,
+ "step": 734
+ },
+ {
+ "epoch": 0.7352067769060048,
+ "grad_norm": 0.7035244703292847,
+ "learning_rate": 0.0001717589911239788,
+ "loss": 1.6031,
+ "step": 735
+ },
+ {
+ "epoch": 0.7362070582351287,
+ "grad_norm": 0.7473010420799255,
+ "learning_rate": 0.00017168594496933074,
+ "loss": 1.5833,
+ "step": 736
+ },
+ {
+ "epoch": 0.7372073395642524,
+ "grad_norm": 0.6310701370239258,
+ "learning_rate": 0.00017161282004479351,
+ "loss": 1.4328,
+ "step": 737
+ },
+ {
+ "epoch": 0.7382076208933762,
+ "grad_norm": 0.6805673837661743,
+ "learning_rate": 0.0001715396164307182,
+ "loss": 1.5429,
+ "step": 738
+ },
+ {
+ "epoch": 0.7392079022225001,
+ "grad_norm": 0.747222900390625,
+ "learning_rate": 0.0001714663342075424,
+ "loss": 1.7696,
+ "step": 739
+ },
+ {
+ "epoch": 0.7402081835516239,
+ "grad_norm": 0.8214403390884399,
+ "learning_rate": 0.00017139297345578994,
+ "loss": 1.5997,
+ "step": 740
+ },
+ {
+ "epoch": 0.7412084648807478,
+ "grad_norm": 0.6722521781921387,
+ "learning_rate": 0.00017131953425607104,
+ "loss": 1.5287,
+ "step": 741
+ },
+ {
+ "epoch": 0.7422087462098715,
+ "grad_norm": 0.6937971115112305,
+ "learning_rate": 0.00017124601668908212,
+ "loss": 1.7263,
+ "step": 742
+ },
+ {
+ "epoch": 0.7432090275389953,
+ "grad_norm": 0.7590844631195068,
+ "learning_rate": 0.00017117242083560568,
+ "loss": 1.7263,
+ "step": 743
+ },
+ {
+ "epoch": 0.7442093088681192,
+ "grad_norm": 0.7913306355476379,
+ "learning_rate": 0.00017109874677651024,
+ "loss": 1.7646,
+ "step": 744
+ },
+ {
+ "epoch": 0.745209590197243,
+ "grad_norm": 0.7123669385910034,
+ "learning_rate": 0.0001710249945927503,
+ "loss": 1.6768,
+ "step": 745
+ },
+ {
+ "epoch": 0.7462098715263668,
+ "grad_norm": 0.8426288366317749,
+ "learning_rate": 0.00017095116436536612,
+ "loss": 1.8496,
+ "step": 746
+ },
+ {
+ "epoch": 0.7472101528554906,
+ "grad_norm": 0.6152015328407288,
+ "learning_rate": 0.00017087725617548385,
+ "loss": 1.4527,
+ "step": 747
+ },
+ {
+ "epoch": 0.7482104341846144,
+ "grad_norm": 0.8348223567008972,
+ "learning_rate": 0.00017080327010431513,
+ "loss": 1.4847,
+ "step": 748
+ },
+ {
+ "epoch": 0.7492107155137382,
+ "grad_norm": 0.7883800268173218,
+ "learning_rate": 0.00017072920623315734,
+ "loss": 1.5941,
+ "step": 749
+ },
+ {
+ "epoch": 0.7502109968428621,
+ "grad_norm": 0.6957768201828003,
+ "learning_rate": 0.00017065506464339326,
+ "loss": 1.7543,
+ "step": 750
+ },
+ {
+ "epoch": 0.7512112781719859,
+ "grad_norm": 0.5898700952529907,
+ "learning_rate": 0.00017058084541649106,
+ "loss": 1.7859,
+ "step": 751
+ },
+ {
+ "epoch": 0.7522115595011097,
+ "grad_norm": 0.6882239580154419,
+ "learning_rate": 0.00017050654863400429,
+ "loss": 1.3233,
+ "step": 752
+ },
+ {
+ "epoch": 0.7532118408302335,
+ "grad_norm": 0.7327316999435425,
+ "learning_rate": 0.00017043217437757164,
+ "loss": 1.5067,
+ "step": 753
+ },
+ {
+ "epoch": 0.7542121221593573,
+ "grad_norm": 0.9257964491844177,
+ "learning_rate": 0.00017035772272891702,
+ "loss": 1.503,
+ "step": 754
+ },
+ {
+ "epoch": 0.7552124034884812,
+ "grad_norm": 0.7924116253852844,
+ "learning_rate": 0.00017028319376984928,
+ "loss": 1.8975,
+ "step": 755
+ },
+ {
+ "epoch": 0.756212684817605,
+ "grad_norm": 0.6651099920272827,
+ "learning_rate": 0.00017020858758226229,
+ "loss": 1.649,
+ "step": 756
+ },
+ {
+ "epoch": 0.7572129661467287,
+ "grad_norm": 0.7257362604141235,
+ "learning_rate": 0.0001701339042481347,
+ "loss": 1.6919,
+ "step": 757
+ },
+ {
+ "epoch": 0.7582132474758526,
+ "grad_norm": 0.8733739852905273,
+ "learning_rate": 0.00017005914384953007,
+ "loss": 1.5929,
+ "step": 758
+ },
+ {
+ "epoch": 0.7592135288049764,
+ "grad_norm": 0.6347383856773376,
+ "learning_rate": 0.00016998430646859654,
+ "loss": 1.3341,
+ "step": 759
+ },
+ {
+ "epoch": 0.7602138101341002,
+ "grad_norm": 0.6915012001991272,
+ "learning_rate": 0.00016990939218756683,
+ "loss": 1.4971,
+ "step": 760
+ },
+ {
+ "epoch": 0.761214091463224,
+ "grad_norm": 0.7862069606781006,
+ "learning_rate": 0.0001698344010887582,
+ "loss": 1.7468,
+ "step": 761
+ },
+ {
+ "epoch": 0.7622143727923478,
+ "grad_norm": 0.7318029403686523,
+ "learning_rate": 0.0001697593332545723,
+ "loss": 1.8143,
+ "step": 762
+ },
+ {
+ "epoch": 0.7632146541214717,
+ "grad_norm": 0.6758155226707458,
+ "learning_rate": 0.0001696841887674951,
+ "loss": 1.6652,
+ "step": 763
+ },
+ {
+ "epoch": 0.7642149354505955,
+ "grad_norm": 0.6853237748146057,
+ "learning_rate": 0.00016960896771009684,
+ "loss": 1.5176,
+ "step": 764
+ },
+ {
+ "epoch": 0.7652152167797193,
+ "grad_norm": 0.9686934351921082,
+ "learning_rate": 0.00016953367016503182,
+ "loss": 1.5366,
+ "step": 765
+ },
+ {
+ "epoch": 0.7662154981088432,
+ "grad_norm": 0.7232028841972351,
+ "learning_rate": 0.00016945829621503838,
+ "loss": 1.6932,
+ "step": 766
+ },
+ {
+ "epoch": 0.7672157794379669,
+ "grad_norm": 0.6606596112251282,
+ "learning_rate": 0.00016938284594293897,
+ "loss": 1.7051,
+ "step": 767
+ },
+ {
+ "epoch": 0.7682160607670907,
+ "grad_norm": 0.6337714195251465,
+ "learning_rate": 0.00016930731943163972,
+ "loss": 1.6505,
+ "step": 768
+ },
+ {
+ "epoch": 0.7692163420962146,
+ "grad_norm": 0.6292264461517334,
+ "learning_rate": 0.00016923171676413063,
+ "loss": 1.7207,
+ "step": 769
+ },
+ {
+ "epoch": 0.7702166234253384,
+ "grad_norm": 0.7183407545089722,
+ "learning_rate": 0.00016915603802348535,
+ "loss": 1.7025,
+ "step": 770
+ },
+ {
+ "epoch": 0.7712169047544621,
+ "grad_norm": 0.805107593536377,
+ "learning_rate": 0.00016908028329286112,
+ "loss": 1.592,
+ "step": 771
+ },
+ {
+ "epoch": 0.772217186083586,
+ "grad_norm": 0.725777804851532,
+ "learning_rate": 0.0001690044526554987,
+ "loss": 1.6714,
+ "step": 772
+ },
+ {
+ "epoch": 0.7732174674127098,
+ "grad_norm": 0.6801775097846985,
+ "learning_rate": 0.00016892854619472223,
+ "loss": 1.5047,
+ "step": 773
+ },
+ {
+ "epoch": 0.7742177487418337,
+ "grad_norm": 0.7701449990272522,
+ "learning_rate": 0.00016885256399393924,
+ "loss": 1.5506,
+ "step": 774
+ },
+ {
+ "epoch": 0.7752180300709575,
+ "grad_norm": 0.6954746842384338,
+ "learning_rate": 0.00016877650613664034,
+ "loss": 1.4859,
+ "step": 775
+ },
+ {
+ "epoch": 0.7762183114000812,
+ "grad_norm": 0.7431885004043579,
+ "learning_rate": 0.00016870037270639942,
+ "loss": 1.6087,
+ "step": 776
+ },
+ {
+ "epoch": 0.7772185927292051,
+ "grad_norm": 0.687329113483429,
+ "learning_rate": 0.0001686241637868734,
+ "loss": 1.7038,
+ "step": 777
+ },
+ {
+ "epoch": 0.7782188740583289,
+ "grad_norm": 0.6656787395477295,
+ "learning_rate": 0.00016854787946180198,
+ "loss": 1.5691,
+ "step": 778
+ },
+ {
+ "epoch": 0.7792191553874527,
+ "grad_norm": 0.7476064562797546,
+ "learning_rate": 0.00016847151981500789,
+ "loss": 1.4972,
+ "step": 779
+ },
+ {
+ "epoch": 0.7802194367165766,
+ "grad_norm": 0.7320332527160645,
+ "learning_rate": 0.00016839508493039657,
+ "loss": 1.7326,
+ "step": 780
+ },
+ {
+ "epoch": 0.7812197180457003,
+ "grad_norm": 0.6432293057441711,
+ "learning_rate": 0.00016831857489195618,
+ "loss": 1.542,
+ "step": 781
+ },
+ {
+ "epoch": 0.7822199993748241,
+ "grad_norm": 0.6751729846000671,
+ "learning_rate": 0.00016824198978375736,
+ "loss": 1.6864,
+ "step": 782
+ },
+ {
+ "epoch": 0.783220280703948,
+ "grad_norm": 0.770193338394165,
+ "learning_rate": 0.00016816532968995328,
+ "loss": 1.5318,
+ "step": 783
+ },
+ {
+ "epoch": 0.7842205620330718,
+ "grad_norm": 0.6820619106292725,
+ "learning_rate": 0.0001680885946947796,
+ "loss": 1.6004,
+ "step": 784
+ },
+ {
+ "epoch": 0.7852208433621957,
+ "grad_norm": 0.9120951294898987,
+ "learning_rate": 0.00016801178488255413,
+ "loss": 1.6506,
+ "step": 785
+ },
+ {
+ "epoch": 0.7862211246913194,
+ "grad_norm": 0.7819542288780212,
+ "learning_rate": 0.00016793490033767698,
+ "loss": 1.5292,
+ "step": 786
+ },
+ {
+ "epoch": 0.7872214060204432,
+ "grad_norm": 0.6647278666496277,
+ "learning_rate": 0.00016785794114463037,
+ "loss": 1.5941,
+ "step": 787
+ },
+ {
+ "epoch": 0.7882216873495671,
+ "grad_norm": 0.6874713897705078,
+ "learning_rate": 0.00016778090738797853,
+ "loss": 1.5543,
+ "step": 788
+ },
+ {
+ "epoch": 0.7892219686786909,
+ "grad_norm": 0.7759424448013306,
+ "learning_rate": 0.00016770379915236766,
+ "loss": 1.6788,
+ "step": 789
+ },
+ {
+ "epoch": 0.7902222500078147,
+ "grad_norm": 0.724583625793457,
+ "learning_rate": 0.00016762661652252567,
+ "loss": 1.5998,
+ "step": 790
+ },
+ {
+ "epoch": 0.7912225313369385,
+ "grad_norm": 0.7921720743179321,
+ "learning_rate": 0.00016754935958326244,
+ "loss": 1.5956,
+ "step": 791
+ },
+ {
+ "epoch": 0.7922228126660623,
+ "grad_norm": 0.6484968662261963,
+ "learning_rate": 0.00016747202841946928,
+ "loss": 1.5708,
+ "step": 792
+ },
+ {
+ "epoch": 0.7932230939951862,
+ "grad_norm": 0.6372153759002686,
+ "learning_rate": 0.00016739462311611919,
+ "loss": 1.5213,
+ "step": 793
+ },
+ {
+ "epoch": 0.79422337532431,
+ "grad_norm": 0.7025095224380493,
+ "learning_rate": 0.00016731714375826657,
+ "loss": 1.4701,
+ "step": 794
+ },
+ {
+ "epoch": 0.7952236566534338,
+ "grad_norm": 0.681094765663147,
+ "learning_rate": 0.00016723959043104728,
+ "loss": 1.5101,
+ "step": 795
+ },
+ {
+ "epoch": 0.7962239379825576,
+ "grad_norm": 0.7129995822906494,
+ "learning_rate": 0.00016716196321967832,
+ "loss": 1.6038,
+ "step": 796
+ },
+ {
+ "epoch": 0.7972242193116814,
+ "grad_norm": 0.7403759360313416,
+ "learning_rate": 0.00016708426220945802,
+ "loss": 1.5906,
+ "step": 797
+ },
+ {
+ "epoch": 0.7982245006408052,
+ "grad_norm": 0.6562372446060181,
+ "learning_rate": 0.00016700648748576574,
+ "loss": 1.6469,
+ "step": 798
+ },
+ {
+ "epoch": 0.7992247819699291,
+ "grad_norm": 0.839885413646698,
+ "learning_rate": 0.0001669286391340618,
+ "loss": 1.5385,
+ "step": 799
+ },
+ {
+ "epoch": 0.8002250632990529,
+ "grad_norm": 0.8687535524368286,
+ "learning_rate": 0.00016685071723988748,
+ "loss": 1.6759,
+ "step": 800
+ },
+ {
+ "epoch": 0.8012253446281766,
+ "grad_norm": 0.6825409531593323,
+ "learning_rate": 0.00016677272188886483,
+ "loss": 1.841,
+ "step": 801
+ },
+ {
+ "epoch": 0.8022256259573005,
+ "grad_norm": 0.6831037402153015,
+ "learning_rate": 0.00016669465316669667,
+ "loss": 1.5476,
+ "step": 802
+ },
+ {
+ "epoch": 0.8032259072864243,
+ "grad_norm": 0.6906002759933472,
+ "learning_rate": 0.00016661651115916642,
+ "loss": 1.6866,
+ "step": 803
+ },
+ {
+ "epoch": 0.8042261886155482,
+ "grad_norm": 0.7675560116767883,
+ "learning_rate": 0.00016653829595213794,
+ "loss": 1.5663,
+ "step": 804
+ },
+ {
+ "epoch": 0.805226469944672,
+ "grad_norm": 0.6594063639640808,
+ "learning_rate": 0.00016646000763155568,
+ "loss": 1.5247,
+ "step": 805
+ },
+ {
+ "epoch": 0.8062267512737957,
+ "grad_norm": 0.7470384836196899,
+ "learning_rate": 0.00016638164628344425,
+ "loss": 1.6468,
+ "step": 806
+ },
+ {
+ "epoch": 0.8072270326029196,
+ "grad_norm": 0.6874479651451111,
+ "learning_rate": 0.00016630321199390867,
+ "loss": 1.5948,
+ "step": 807
+ },
+ {
+ "epoch": 0.8082273139320434,
+ "grad_norm": 0.7301204204559326,
+ "learning_rate": 0.00016622470484913406,
+ "loss": 1.3922,
+ "step": 808
+ },
+ {
+ "epoch": 0.8092275952611672,
+ "grad_norm": 0.6781039834022522,
+ "learning_rate": 0.00016614612493538551,
+ "loss": 1.6054,
+ "step": 809
+ },
+ {
+ "epoch": 0.810227876590291,
+ "grad_norm": 0.6913226246833801,
+ "learning_rate": 0.00016606747233900815,
+ "loss": 1.5754,
+ "step": 810
+ },
+ {
+ "epoch": 0.8112281579194148,
+ "grad_norm": 0.667425811290741,
+ "learning_rate": 0.00016598874714642697,
+ "loss": 1.8492,
+ "step": 811
+ },
+ {
+ "epoch": 0.8122284392485386,
+ "grad_norm": 0.7662241458892822,
+ "learning_rate": 0.00016590994944414678,
+ "loss": 1.8034,
+ "step": 812
+ },
+ {
+ "epoch": 0.8132287205776625,
+ "grad_norm": 0.7574827075004578,
+ "learning_rate": 0.00016583107931875192,
+ "loss": 1.7435,
+ "step": 813
+ },
+ {
+ "epoch": 0.8142290019067863,
+ "grad_norm": 0.9005519151687622,
+ "learning_rate": 0.0001657521368569064,
+ "loss": 1.6769,
+ "step": 814
+ },
+ {
+ "epoch": 0.8152292832359102,
+ "grad_norm": 0.6895585656166077,
+ "learning_rate": 0.0001656731221453537,
+ "loss": 1.7562,
+ "step": 815
+ },
+ {
+ "epoch": 0.8162295645650339,
+ "grad_norm": 0.7573346495628357,
+ "learning_rate": 0.00016559403527091675,
+ "loss": 1.4748,
+ "step": 816
+ },
+ {
+ "epoch": 0.8172298458941577,
+ "grad_norm": 0.7698647975921631,
+ "learning_rate": 0.0001655148763204977,
+ "loss": 1.6174,
+ "step": 817
+ },
+ {
+ "epoch": 0.8182301272232816,
+ "grad_norm": 0.7975410223007202,
+ "learning_rate": 0.00016543564538107797,
+ "loss": 1.7924,
+ "step": 818
+ },
+ {
+ "epoch": 0.8192304085524054,
+ "grad_norm": 0.9687625169754028,
+ "learning_rate": 0.00016535634253971794,
+ "loss": 1.7725,
+ "step": 819
+ },
+ {
+ "epoch": 0.8202306898815291,
+ "grad_norm": 0.6777274012565613,
+ "learning_rate": 0.00016527696788355714,
+ "loss": 1.5018,
+ "step": 820
+ },
+ {
+ "epoch": 0.821230971210653,
+ "grad_norm": 0.6990464329719543,
+ "learning_rate": 0.00016519752149981397,
+ "loss": 1.5804,
+ "step": 821
+ },
+ {
+ "epoch": 0.8222312525397768,
+ "grad_norm": 0.8445940613746643,
+ "learning_rate": 0.0001651180034757856,
+ "loss": 1.8591,
+ "step": 822
+ },
+ {
+ "epoch": 0.8232315338689006,
+ "grad_norm": 0.8462644815444946,
+ "learning_rate": 0.00016503841389884798,
+ "loss": 1.7582,
+ "step": 823
+ },
+ {
+ "epoch": 0.8242318151980245,
+ "grad_norm": 0.7679311037063599,
+ "learning_rate": 0.00016495875285645566,
+ "loss": 1.5971,
+ "step": 824
+ },
+ {
+ "epoch": 0.8252320965271482,
+ "grad_norm": 0.7734447717666626,
+ "learning_rate": 0.00016487902043614173,
+ "loss": 1.714,
+ "step": 825
+ },
+ {
+ "epoch": 0.8262323778562721,
+ "grad_norm": 0.7890239953994751,
+ "learning_rate": 0.0001647992167255177,
+ "loss": 1.6876,
+ "step": 826
+ },
+ {
+ "epoch": 0.8272326591853959,
+ "grad_norm": 0.8530203104019165,
+ "learning_rate": 0.0001647193418122734,
+ "loss": 1.9096,
+ "step": 827
+ },
+ {
+ "epoch": 0.8282329405145197,
+ "grad_norm": 0.7828260064125061,
+ "learning_rate": 0.00016463939578417692,
+ "loss": 1.5518,
+ "step": 828
+ },
+ {
+ "epoch": 0.8292332218436436,
+ "grad_norm": 0.7015512585639954,
+ "learning_rate": 0.0001645593787290745,
+ "loss": 1.49,
+ "step": 829
+ },
+ {
+ "epoch": 0.8302335031727673,
+ "grad_norm": 0.694771409034729,
+ "learning_rate": 0.0001644792907348904,
+ "loss": 1.5506,
+ "step": 830
+ },
+ {
+ "epoch": 0.8312337845018911,
+ "grad_norm": 0.8167857527732849,
+ "learning_rate": 0.00016439913188962685,
+ "loss": 1.7798,
+ "step": 831
+ },
+ {
+ "epoch": 0.832234065831015,
+ "grad_norm": 0.6682108044624329,
+ "learning_rate": 0.0001643189022813639,
+ "loss": 1.6107,
+ "step": 832
+ },
+ {
+ "epoch": 0.8332343471601388,
+ "grad_norm": 0.8347259163856506,
+ "learning_rate": 0.0001642386019982594,
+ "loss": 1.7672,
+ "step": 833
+ },
+ {
+ "epoch": 0.8342346284892626,
+ "grad_norm": 0.6620945334434509,
+ "learning_rate": 0.00016415823112854883,
+ "loss": 1.6975,
+ "step": 834
+ },
+ {
+ "epoch": 0.8352349098183864,
+ "grad_norm": 0.7286327481269836,
+ "learning_rate": 0.00016407778976054526,
+ "loss": 1.5956,
+ "step": 835
+ },
+ {
+ "epoch": 0.8362351911475102,
+ "grad_norm": 0.6344440579414368,
+ "learning_rate": 0.0001639972779826392,
+ "loss": 1.6455,
+ "step": 836
+ },
+ {
+ "epoch": 0.8372354724766341,
+ "grad_norm": 0.6607793569564819,
+ "learning_rate": 0.0001639166958832985,
+ "loss": 1.6739,
+ "step": 837
+ },
+ {
+ "epoch": 0.8382357538057579,
+ "grad_norm": 0.6973574161529541,
+ "learning_rate": 0.00016383604355106837,
+ "loss": 1.8042,
+ "step": 838
+ },
+ {
+ "epoch": 0.8392360351348817,
+ "grad_norm": 0.7744210958480835,
+ "learning_rate": 0.00016375532107457108,
+ "loss": 1.528,
+ "step": 839
+ },
+ {
+ "epoch": 0.8402363164640055,
+ "grad_norm": 0.6944973468780518,
+ "learning_rate": 0.00016367452854250603,
+ "loss": 1.5498,
+ "step": 840
+ },
+ {
+ "epoch": 0.8412365977931293,
+ "grad_norm": 0.6730696558952332,
+ "learning_rate": 0.00016359366604364972,
+ "loss": 1.5849,
+ "step": 841
+ },
+ {
+ "epoch": 0.8422368791222531,
+ "grad_norm": 0.7051465511322021,
+ "learning_rate": 0.00016351273366685526,
+ "loss": 1.5972,
+ "step": 842
+ },
+ {
+ "epoch": 0.843237160451377,
+ "grad_norm": 0.7309426069259644,
+ "learning_rate": 0.00016343173150105278,
+ "loss": 1.4612,
+ "step": 843
+ },
+ {
+ "epoch": 0.8442374417805008,
+ "grad_norm": 0.7830431461334229,
+ "learning_rate": 0.00016335065963524897,
+ "loss": 1.7208,
+ "step": 844
+ },
+ {
+ "epoch": 0.8452377231096245,
+ "grad_norm": 0.8609834909439087,
+ "learning_rate": 0.0001632695181585272,
+ "loss": 1.8229,
+ "step": 845
+ },
+ {
+ "epoch": 0.8462380044387484,
+ "grad_norm": 0.7489060759544373,
+ "learning_rate": 0.00016318830716004722,
+ "loss": 1.6955,
+ "step": 846
+ },
+ {
+ "epoch": 0.8472382857678722,
+ "grad_norm": 0.636900782585144,
+ "learning_rate": 0.00016310702672904528,
+ "loss": 1.6664,
+ "step": 847
+ },
+ {
+ "epoch": 0.8482385670969961,
+ "grad_norm": 0.6423529386520386,
+ "learning_rate": 0.00016302567695483382,
+ "loss": 1.5356,
+ "step": 848
+ },
+ {
+ "epoch": 0.8492388484261199,
+ "grad_norm": 0.7380033731460571,
+ "learning_rate": 0.0001629442579268016,
+ "loss": 1.4482,
+ "step": 849
+ },
+ {
+ "epoch": 0.8502391297552436,
+ "grad_norm": 0.8258544206619263,
+ "learning_rate": 0.00016286276973441333,
+ "loss": 1.7058,
+ "step": 850
+ },
+ {
+ "epoch": 0.8512394110843675,
+ "grad_norm": 0.6473391056060791,
+ "learning_rate": 0.00016278121246720987,
+ "loss": 1.5374,
+ "step": 851
+ },
+ {
+ "epoch": 0.8522396924134913,
+ "grad_norm": 0.7097072005271912,
+ "learning_rate": 0.00016269958621480788,
+ "loss": 1.6786,
+ "step": 852
+ },
+ {
+ "epoch": 0.8532399737426151,
+ "grad_norm": 0.724993884563446,
+ "learning_rate": 0.0001626178910668998,
+ "loss": 1.6022,
+ "step": 853
+ },
+ {
+ "epoch": 0.854240255071739,
+ "grad_norm": 0.6800474524497986,
+ "learning_rate": 0.00016253612711325386,
+ "loss": 1.6382,
+ "step": 854
+ },
+ {
+ "epoch": 0.8552405364008627,
+ "grad_norm": 0.6339759826660156,
+ "learning_rate": 0.0001624542944437139,
+ "loss": 1.5641,
+ "step": 855
+ },
+ {
+ "epoch": 0.8562408177299866,
+ "grad_norm": 0.6792349219322205,
+ "learning_rate": 0.00016237239314819917,
+ "loss": 1.3713,
+ "step": 856
+ },
+ {
+ "epoch": 0.8572410990591104,
+ "grad_norm": 0.6544696688652039,
+ "learning_rate": 0.0001622904233167044,
+ "loss": 1.5639,
+ "step": 857
+ },
+ {
+ "epoch": 0.8582413803882342,
+ "grad_norm": 0.7736073732376099,
+ "learning_rate": 0.0001622083850392996,
+ "loss": 1.5454,
+ "step": 858
+ },
+ {
+ "epoch": 0.859241661717358,
+ "grad_norm": 0.8642422556877136,
+ "learning_rate": 0.00016212627840613003,
+ "loss": 1.6852,
+ "step": 859
+ },
+ {
+ "epoch": 0.8602419430464818,
+ "grad_norm": 0.6520773768424988,
+ "learning_rate": 0.000162044103507416,
+ "loss": 1.5335,
+ "step": 860
+ },
+ {
+ "epoch": 0.8612422243756056,
+ "grad_norm": 0.7647336721420288,
+ "learning_rate": 0.00016196186043345288,
+ "loss": 1.5578,
+ "step": 861
+ },
+ {
+ "epoch": 0.8622425057047295,
+ "grad_norm": 0.9621163010597229,
+ "learning_rate": 0.00016187954927461093,
+ "loss": 1.6976,
+ "step": 862
+ },
+ {
+ "epoch": 0.8632427870338533,
+ "grad_norm": 0.6847056746482849,
+ "learning_rate": 0.00016179717012133521,
+ "loss": 1.7118,
+ "step": 863
+ },
+ {
+ "epoch": 0.864243068362977,
+ "grad_norm": 0.7482467889785767,
+ "learning_rate": 0.00016171472306414554,
+ "loss": 1.6601,
+ "step": 864
+ },
+ {
+ "epoch": 0.8652433496921009,
+ "grad_norm": 0.7760444283485413,
+ "learning_rate": 0.00016163220819363628,
+ "loss": 1.5587,
+ "step": 865
+ },
+ {
+ "epoch": 0.8662436310212247,
+ "grad_norm": 0.8380980491638184,
+ "learning_rate": 0.00016154962560047643,
+ "loss": 1.7171,
+ "step": 866
+ },
+ {
+ "epoch": 0.8672439123503486,
+ "grad_norm": 0.6927618384361267,
+ "learning_rate": 0.00016146697537540924,
+ "loss": 1.7244,
+ "step": 867
+ },
+ {
+ "epoch": 0.8682441936794724,
+ "grad_norm": 0.7855746746063232,
+ "learning_rate": 0.0001613842576092524,
+ "loss": 1.5848,
+ "step": 868
+ },
+ {
+ "epoch": 0.8692444750085961,
+ "grad_norm": 0.6743006110191345,
+ "learning_rate": 0.00016130147239289778,
+ "loss": 1.6969,
+ "step": 869
+ },
+ {
+ "epoch": 0.87024475633772,
+ "grad_norm": 0.7060980200767517,
+ "learning_rate": 0.00016121861981731135,
+ "loss": 1.5632,
+ "step": 870
+ },
+ {
+ "epoch": 0.8712450376668438,
+ "grad_norm": 0.7673144340515137,
+ "learning_rate": 0.00016113569997353312,
+ "loss": 1.5687,
+ "step": 871
+ },
+ {
+ "epoch": 0.8722453189959676,
+ "grad_norm": 0.8105847239494324,
+ "learning_rate": 0.000161052712952677,
+ "loss": 1.6074,
+ "step": 872
+ },
+ {
+ "epoch": 0.8732456003250915,
+ "grad_norm": 0.6536850333213806,
+ "learning_rate": 0.0001609696588459307,
+ "loss": 1.5842,
+ "step": 873
+ },
+ {
+ "epoch": 0.8742458816542152,
+ "grad_norm": 0.6653574705123901,
+ "learning_rate": 0.00016088653774455568,
+ "loss": 1.4652,
+ "step": 874
+ },
+ {
+ "epoch": 0.875246162983339,
+ "grad_norm": 0.7202721238136292,
+ "learning_rate": 0.00016080334973988695,
+ "loss": 1.5212,
+ "step": 875
+ },
+ {
+ "epoch": 0.8762464443124629,
+ "grad_norm": 0.8218807578086853,
+ "learning_rate": 0.00016072009492333318,
+ "loss": 1.803,
+ "step": 876
+ },
+ {
+ "epoch": 0.8772467256415867,
+ "grad_norm": 0.6170400381088257,
+ "learning_rate": 0.0001606367733863763,
+ "loss": 1.5313,
+ "step": 877
+ },
+ {
+ "epoch": 0.8782470069707106,
+ "grad_norm": 0.6750448346138,
+ "learning_rate": 0.00016055338522057158,
+ "loss": 1.6183,
+ "step": 878
+ },
+ {
+ "epoch": 0.8792472882998343,
+ "grad_norm": 0.6602128148078918,
+ "learning_rate": 0.00016046993051754756,
+ "loss": 1.6669,
+ "step": 879
+ },
+ {
+ "epoch": 0.8802475696289581,
+ "grad_norm": 0.7064031958580017,
+ "learning_rate": 0.00016038640936900586,
+ "loss": 1.7458,
+ "step": 880
+ },
+ {
+ "epoch": 0.881247850958082,
+ "grad_norm": 0.5916783809661865,
+ "learning_rate": 0.00016030282186672116,
+ "loss": 1.4966,
+ "step": 881
+ },
+ {
+ "epoch": 0.8822481322872058,
+ "grad_norm": 0.7189202904701233,
+ "learning_rate": 0.00016021916810254097,
+ "loss": 1.5812,
+ "step": 882
+ },
+ {
+ "epoch": 0.8832484136163296,
+ "grad_norm": 0.7760966420173645,
+ "learning_rate": 0.00016013544816838565,
+ "loss": 1.6709,
+ "step": 883
+ },
+ {
+ "epoch": 0.8842486949454534,
+ "grad_norm": 0.6894650459289551,
+ "learning_rate": 0.00016005166215624827,
+ "loss": 1.6255,
+ "step": 884
+ },
+ {
+ "epoch": 0.8852489762745772,
+ "grad_norm": 0.6777058839797974,
+ "learning_rate": 0.0001599678101581945,
+ "loss": 1.7479,
+ "step": 885
+ },
+ {
+ "epoch": 0.886249257603701,
+ "grad_norm": 0.7056024670600891,
+ "learning_rate": 0.00015988389226636253,
+ "loss": 1.7896,
+ "step": 886
+ },
+ {
+ "epoch": 0.8872495389328249,
+ "grad_norm": 0.6465604305267334,
+ "learning_rate": 0.00015979990857296295,
+ "loss": 1.7363,
+ "step": 887
+ },
+ {
+ "epoch": 0.8882498202619487,
+ "grad_norm": 0.6703017950057983,
+ "learning_rate": 0.00015971585917027862,
+ "loss": 1.6617,
+ "step": 888
+ },
+ {
+ "epoch": 0.8892501015910725,
+ "grad_norm": 0.7116142511367798,
+ "learning_rate": 0.00015963174415066468,
+ "loss": 1.8232,
+ "step": 889
+ },
+ {
+ "epoch": 0.8902503829201963,
+ "grad_norm": 0.7552229762077332,
+ "learning_rate": 0.0001595475636065483,
+ "loss": 1.7847,
+ "step": 890
+ },
+ {
+ "epoch": 0.8912506642493201,
+ "grad_norm": 0.70728999376297,
+ "learning_rate": 0.00015946331763042867,
+ "loss": 1.5665,
+ "step": 891
+ },
+ {
+ "epoch": 0.892250945578444,
+ "grad_norm": 0.6701356768608093,
+ "learning_rate": 0.00015937900631487686,
+ "loss": 1.3572,
+ "step": 892
+ },
+ {
+ "epoch": 0.8932512269075678,
+ "grad_norm": 0.6960388422012329,
+ "learning_rate": 0.00015929462975253585,
+ "loss": 1.5815,
+ "step": 893
+ },
+ {
+ "epoch": 0.8942515082366915,
+ "grad_norm": 0.6505674719810486,
+ "learning_rate": 0.00015921018803612014,
+ "loss": 1.7499,
+ "step": 894
+ },
+ {
+ "epoch": 0.8952517895658154,
+ "grad_norm": 0.604205310344696,
+ "learning_rate": 0.0001591256812584159,
+ "loss": 1.6838,
+ "step": 895
+ },
+ {
+ "epoch": 0.8962520708949392,
+ "grad_norm": 0.5875198841094971,
+ "learning_rate": 0.00015904110951228082,
+ "loss": 1.5147,
+ "step": 896
+ },
+ {
+ "epoch": 0.897252352224063,
+ "grad_norm": 0.6970433592796326,
+ "learning_rate": 0.00015895647289064396,
+ "loss": 1.7767,
+ "step": 897
+ },
+ {
+ "epoch": 0.8982526335531869,
+ "grad_norm": 0.7364515066146851,
+ "learning_rate": 0.00015887177148650564,
+ "loss": 1.6672,
+ "step": 898
+ },
+ {
+ "epoch": 0.8992529148823106,
+ "grad_norm": 0.7843589186668396,
+ "learning_rate": 0.0001587870053929374,
+ "loss": 1.689,
+ "step": 899
+ },
+ {
+ "epoch": 0.9002531962114345,
+ "grad_norm": 0.6405196189880371,
+ "learning_rate": 0.00015870217470308188,
+ "loss": 1.5917,
+ "step": 900
+ },
+ {
+ "epoch": 0.9012534775405583,
+ "grad_norm": 0.7019757628440857,
+ "learning_rate": 0.0001586172795101526,
+ "loss": 1.5497,
+ "step": 901
+ },
+ {
+ "epoch": 0.9022537588696821,
+ "grad_norm": 0.8048270344734192,
+ "learning_rate": 0.00015853231990743406,
+ "loss": 1.5821,
+ "step": 902
+ },
+ {
+ "epoch": 0.903254040198806,
+ "grad_norm": 0.6245777606964111,
+ "learning_rate": 0.0001584472959882815,
+ "loss": 1.5688,
+ "step": 903
+ },
+ {
+ "epoch": 0.9042543215279297,
+ "grad_norm": 0.6584132313728333,
+ "learning_rate": 0.00015836220784612085,
+ "loss": 1.4555,
+ "step": 904
+ },
+ {
+ "epoch": 0.9052546028570535,
+ "grad_norm": 0.7710773944854736,
+ "learning_rate": 0.00015827705557444852,
+ "loss": 1.6416,
+ "step": 905
+ },
+ {
+ "epoch": 0.9062548841861774,
+ "grad_norm": 0.6738126277923584,
+ "learning_rate": 0.00015819183926683153,
+ "loss": 1.6272,
+ "step": 906
+ },
+ {
+ "epoch": 0.9072551655153012,
+ "grad_norm": 0.6698735356330872,
+ "learning_rate": 0.00015810655901690715,
+ "loss": 1.4778,
+ "step": 907
+ },
+ {
+ "epoch": 0.9082554468444249,
+ "grad_norm": 1.0088928937911987,
+ "learning_rate": 0.00015802121491838297,
+ "loss": 1.6854,
+ "step": 908
+ },
+ {
+ "epoch": 0.9092557281735488,
+ "grad_norm": 0.6948708891868591,
+ "learning_rate": 0.0001579358070650367,
+ "loss": 1.5673,
+ "step": 909
+ },
+ {
+ "epoch": 0.9102560095026726,
+ "grad_norm": 0.6728948950767517,
+ "learning_rate": 0.00015785033555071616,
+ "loss": 1.6646,
+ "step": 910
+ },
+ {
+ "epoch": 0.9112562908317965,
+ "grad_norm": 0.8096952438354492,
+ "learning_rate": 0.00015776480046933905,
+ "loss": 1.4675,
+ "step": 911
+ },
+ {
+ "epoch": 0.9122565721609203,
+ "grad_norm": 0.6625403761863708,
+ "learning_rate": 0.000157679201914893,
+ "loss": 1.4793,
+ "step": 912
+ },
+ {
+ "epoch": 0.913256853490044,
+ "grad_norm": 0.7129424810409546,
+ "learning_rate": 0.00015759353998143528,
+ "loss": 1.574,
+ "step": 913
+ },
+ {
+ "epoch": 0.9142571348191679,
+ "grad_norm": 0.6151349544525146,
+ "learning_rate": 0.00015750781476309288,
+ "loss": 1.5631,
+ "step": 914
+ },
+ {
+ "epoch": 0.9152574161482917,
+ "grad_norm": 0.7185074687004089,
+ "learning_rate": 0.00015742202635406235,
+ "loss": 1.8382,
+ "step": 915
+ },
+ {
+ "epoch": 0.9162576974774155,
+ "grad_norm": 0.7076066732406616,
+ "learning_rate": 0.00015733617484860963,
+ "loss": 1.5394,
+ "step": 916
+ },
+ {
+ "epoch": 0.9172579788065394,
+ "grad_norm": 0.7286276817321777,
+ "learning_rate": 0.00015725026034106996,
+ "loss": 1.8139,
+ "step": 917
+ },
+ {
+ "epoch": 0.9182582601356631,
+ "grad_norm": 0.757075846195221,
+ "learning_rate": 0.00015716428292584787,
+ "loss": 1.6768,
+ "step": 918
+ },
+ {
+ "epoch": 0.919258541464787,
+ "grad_norm": 0.6926739811897278,
+ "learning_rate": 0.00015707824269741702,
+ "loss": 1.4541,
+ "step": 919
+ },
+ {
+ "epoch": 0.9202588227939108,
+ "grad_norm": 0.6489847898483276,
+ "learning_rate": 0.00015699213975031996,
+ "loss": 1.4725,
+ "step": 920
+ },
+ {
+ "epoch": 0.9212591041230346,
+ "grad_norm": 0.7668707966804504,
+ "learning_rate": 0.0001569059741791684,
+ "loss": 1.4239,
+ "step": 921
+ },
+ {
+ "epoch": 0.9222593854521585,
+ "grad_norm": 0.736863911151886,
+ "learning_rate": 0.0001568197460786426,
+ "loss": 1.6117,
+ "step": 922
+ },
+ {
+ "epoch": 0.9232596667812822,
+ "grad_norm": 0.8462884426116943,
+ "learning_rate": 0.0001567334555434917,
+ "loss": 1.5025,
+ "step": 923
+ },
+ {
+ "epoch": 0.924259948110406,
+ "grad_norm": 0.7481950521469116,
+ "learning_rate": 0.0001566471026685334,
+ "loss": 1.5024,
+ "step": 924
+ },
+ {
+ "epoch": 0.9252602294395299,
+ "grad_norm": 0.6457516551017761,
+ "learning_rate": 0.00015656068754865387,
+ "loss": 1.4526,
+ "step": 925
+ },
+ {
+ "epoch": 0.9262605107686537,
+ "grad_norm": 0.809140682220459,
+ "learning_rate": 0.00015647421027880772,
+ "loss": 1.4449,
+ "step": 926
+ },
+ {
+ "epoch": 0.9272607920977775,
+ "grad_norm": 0.6967790126800537,
+ "learning_rate": 0.0001563876709540178,
+ "loss": 1.5552,
+ "step": 927
+ },
+ {
+ "epoch": 0.9282610734269013,
+ "grad_norm": 0.6858595609664917,
+ "learning_rate": 0.0001563010696693752,
+ "loss": 1.6202,
+ "step": 928
+ },
+ {
+ "epoch": 0.9292613547560251,
+ "grad_norm": 0.7033559679985046,
+ "learning_rate": 0.00015621440652003907,
+ "loss": 1.7186,
+ "step": 929
+ },
+ {
+ "epoch": 0.930261636085149,
+ "grad_norm": 0.6527283787727356,
+ "learning_rate": 0.00015612768160123652,
+ "loss": 1.5028,
+ "step": 930
+ },
+ {
+ "epoch": 0.9312619174142728,
+ "grad_norm": 0.7243602275848389,
+ "learning_rate": 0.00015604089500826257,
+ "loss": 1.6729,
+ "step": 931
+ },
+ {
+ "epoch": 0.9322621987433966,
+ "grad_norm": 0.6734297275543213,
+ "learning_rate": 0.00015595404683648,
+ "loss": 1.4731,
+ "step": 932
+ },
+ {
+ "epoch": 0.9332624800725204,
+ "grad_norm": 0.7641247510910034,
+ "learning_rate": 0.00015586713718131922,
+ "loss": 1.5851,
+ "step": 933
+ },
+ {
+ "epoch": 0.9342627614016442,
+ "grad_norm": 0.7062788009643555,
+ "learning_rate": 0.0001557801661382782,
+ "loss": 1.5735,
+ "step": 934
+ },
+ {
+ "epoch": 0.935263042730768,
+ "grad_norm": 0.6413556337356567,
+ "learning_rate": 0.00015569313380292248,
+ "loss": 1.5854,
+ "step": 935
+ },
+ {
+ "epoch": 0.9362633240598919,
+ "grad_norm": 0.645720362663269,
+ "learning_rate": 0.00015560604027088477,
+ "loss": 1.5072,
+ "step": 936
+ },
+ {
+ "epoch": 0.9372636053890157,
+ "grad_norm": 0.6726225018501282,
+ "learning_rate": 0.00015551888563786515,
+ "loss": 1.587,
+ "step": 937
+ },
+ {
+ "epoch": 0.9382638867181394,
+ "grad_norm": 0.7043680548667908,
+ "learning_rate": 0.00015543166999963076,
+ "loss": 1.6577,
+ "step": 938
+ },
+ {
+ "epoch": 0.9392641680472633,
+ "grad_norm": 0.7049617767333984,
+ "learning_rate": 0.0001553443934520159,
+ "loss": 1.7624,
+ "step": 939
+ },
+ {
+ "epoch": 0.9402644493763871,
+ "grad_norm": 0.7060776352882385,
+ "learning_rate": 0.00015525705609092157,
+ "loss": 1.6208,
+ "step": 940
+ },
+ {
+ "epoch": 0.941264730705511,
+ "grad_norm": 0.6215025186538696,
+ "learning_rate": 0.00015516965801231586,
+ "loss": 1.4645,
+ "step": 941
+ },
+ {
+ "epoch": 0.9422650120346348,
+ "grad_norm": 0.7021099328994751,
+ "learning_rate": 0.0001550821993122334,
+ "loss": 1.566,
+ "step": 942
+ },
+ {
+ "epoch": 0.9432652933637585,
+ "grad_norm": 0.6451042294502258,
+ "learning_rate": 0.0001549946800867755,
+ "loss": 1.7491,
+ "step": 943
+ },
+ {
+ "epoch": 0.9442655746928824,
+ "grad_norm": 0.7288572192192078,
+ "learning_rate": 0.00015490710043210997,
+ "loss": 1.6302,
+ "step": 944
+ },
+ {
+ "epoch": 0.9452658560220062,
+ "grad_norm": 0.7850833535194397,
+ "learning_rate": 0.00015481946044447099,
+ "loss": 1.5673,
+ "step": 945
+ },
+ {
+ "epoch": 0.94626613735113,
+ "grad_norm": 0.7459181547164917,
+ "learning_rate": 0.00015473176022015906,
+ "loss": 1.4529,
+ "step": 946
+ },
+ {
+ "epoch": 0.9472664186802539,
+ "grad_norm": 0.7002627849578857,
+ "learning_rate": 0.0001546439998555409,
+ "loss": 1.8814,
+ "step": 947
+ },
+ {
+ "epoch": 0.9482667000093776,
+ "grad_norm": 0.6664572358131409,
+ "learning_rate": 0.0001545561794470492,
+ "loss": 1.5337,
+ "step": 948
+ },
+ {
+ "epoch": 0.9492669813385014,
+ "grad_norm": 0.757116973400116,
+ "learning_rate": 0.00015446829909118275,
+ "loss": 1.5775,
+ "step": 949
+ },
+ {
+ "epoch": 0.9502672626676253,
+ "grad_norm": 0.7456643581390381,
+ "learning_rate": 0.00015438035888450623,
+ "loss": 1.525,
+ "step": 950
+ },
+ {
+ "epoch": 0.9512675439967491,
+ "grad_norm": 0.6722500920295715,
+ "learning_rate": 0.00015429235892364994,
+ "loss": 1.5059,
+ "step": 951
+ },
+ {
+ "epoch": 0.952267825325873,
+ "grad_norm": 0.7431210279464722,
+ "learning_rate": 0.00015420429930530996,
+ "loss": 1.6867,
+ "step": 952
+ },
+ {
+ "epoch": 0.9532681066549967,
+ "grad_norm": 0.751015305519104,
+ "learning_rate": 0.00015411618012624786,
+ "loss": 1.7371,
+ "step": 953
+ },
+ {
+ "epoch": 0.9542683879841205,
+ "grad_norm": 0.807579517364502,
+ "learning_rate": 0.00015402800148329071,
+ "loss": 1.7353,
+ "step": 954
+ },
+ {
+ "epoch": 0.9552686693132444,
+ "grad_norm": 0.608161449432373,
+ "learning_rate": 0.00015393976347333088,
+ "loss": 1.3074,
+ "step": 955
+ },
+ {
+ "epoch": 0.9562689506423682,
+ "grad_norm": 0.7092815637588501,
+ "learning_rate": 0.00015385146619332596,
+ "loss": 1.676,
+ "step": 956
+ },
+ {
+ "epoch": 0.9572692319714919,
+ "grad_norm": 0.7639429569244385,
+ "learning_rate": 0.00015376310974029873,
+ "loss": 1.6452,
+ "step": 957
+ },
+ {
+ "epoch": 0.9582695133006158,
+ "grad_norm": 0.7333659529685974,
+ "learning_rate": 0.00015367469421133695,
+ "loss": 1.6821,
+ "step": 958
+ },
+ {
+ "epoch": 0.9592697946297396,
+ "grad_norm": 0.7246838212013245,
+ "learning_rate": 0.00015358621970359325,
+ "loss": 1.5078,
+ "step": 959
+ },
+ {
+ "epoch": 0.9602700759588634,
+ "grad_norm": 0.7209622859954834,
+ "learning_rate": 0.00015349768631428519,
+ "loss": 1.5617,
+ "step": 960
+ },
+ {
+ "epoch": 0.9612703572879873,
+ "grad_norm": 0.7034916877746582,
+ "learning_rate": 0.00015340909414069488,
+ "loss": 1.4711,
+ "step": 961
+ },
+ {
+ "epoch": 0.962270638617111,
+ "grad_norm": 0.7311360239982605,
+ "learning_rate": 0.00015332044328016914,
+ "loss": 1.6488,
+ "step": 962
+ },
+ {
+ "epoch": 0.9632709199462349,
+ "grad_norm": 0.6668992638587952,
+ "learning_rate": 0.0001532317338301192,
+ "loss": 1.6804,
+ "step": 963
+ },
+ {
+ "epoch": 0.9642712012753587,
+ "grad_norm": 0.6265329122543335,
+ "learning_rate": 0.00015314296588802076,
+ "loss": 1.8169,
+ "step": 964
+ },
+ {
+ "epoch": 0.9652714826044825,
+ "grad_norm": 0.6945448517799377,
+ "learning_rate": 0.00015305413955141365,
+ "loss": 1.8041,
+ "step": 965
+ },
+ {
+ "epoch": 0.9662717639336064,
+ "grad_norm": 0.6718643307685852,
+ "learning_rate": 0.00015296525491790205,
+ "loss": 1.3486,
+ "step": 966
+ },
+ {
+ "epoch": 0.9672720452627301,
+ "grad_norm": 0.6232700943946838,
+ "learning_rate": 0.00015287631208515406,
+ "loss": 1.5672,
+ "step": 967
+ },
+ {
+ "epoch": 0.9682723265918539,
+ "grad_norm": 0.7481172680854797,
+ "learning_rate": 0.00015278731115090171,
+ "loss": 1.5992,
+ "step": 968
+ },
+ {
+ "epoch": 0.9692726079209778,
+ "grad_norm": 0.6585466861724854,
+ "learning_rate": 0.00015269825221294098,
+ "loss": 1.6403,
+ "step": 969
+ },
+ {
+ "epoch": 0.9702728892501016,
+ "grad_norm": 0.7587956786155701,
+ "learning_rate": 0.00015260913536913154,
+ "loss": 1.7991,
+ "step": 970
+ },
+ {
+ "epoch": 0.9712731705792254,
+ "grad_norm": 0.672698974609375,
+ "learning_rate": 0.00015251996071739664,
+ "loss": 1.4311,
+ "step": 971
+ },
+ {
+ "epoch": 0.9722734519083492,
+ "grad_norm": 0.7597199082374573,
+ "learning_rate": 0.00015243072835572318,
+ "loss": 1.5692,
+ "step": 972
+ },
+ {
+ "epoch": 0.973273733237473,
+ "grad_norm": 0.7342745661735535,
+ "learning_rate": 0.0001523414383821613,
+ "loss": 1.6364,
+ "step": 973
+ },
+ {
+ "epoch": 0.9742740145665969,
+ "grad_norm": 0.6640815138816833,
+ "learning_rate": 0.00015225209089482462,
+ "loss": 1.5113,
+ "step": 974
+ },
+ {
+ "epoch": 0.9752742958957207,
+ "grad_norm": 0.6298378109931946,
+ "learning_rate": 0.0001521626859918898,
+ "loss": 1.4822,
+ "step": 975
+ },
+ {
+ "epoch": 0.9762745772248445,
+ "grad_norm": 0.6862055659294128,
+ "learning_rate": 0.00015207322377159668,
+ "loss": 1.6159,
+ "step": 976
+ },
+ {
+ "epoch": 0.9772748585539683,
+ "grad_norm": 0.6377236843109131,
+ "learning_rate": 0.00015198370433224805,
+ "loss": 1.6046,
+ "step": 977
+ },
+ {
+ "epoch": 0.9782751398830921,
+ "grad_norm": 0.620070219039917,
+ "learning_rate": 0.00015189412777220958,
+ "loss": 1.589,
+ "step": 978
+ },
+ {
+ "epoch": 0.9792754212122159,
+ "grad_norm": 0.7776119112968445,
+ "learning_rate": 0.00015180449418990976,
+ "loss": 1.485,
+ "step": 979
+ },
+ {
+ "epoch": 0.9802757025413398,
+ "grad_norm": 0.8258413076400757,
+ "learning_rate": 0.00015171480368383964,
+ "loss": 1.5615,
+ "step": 980
+ },
+ {
+ "epoch": 0.9812759838704636,
+ "grad_norm": 0.7297958135604858,
+ "learning_rate": 0.00015162505635255287,
+ "loss": 1.5408,
+ "step": 981
+ },
+ {
+ "epoch": 0.9822762651995874,
+ "grad_norm": 0.5848103165626526,
+ "learning_rate": 0.00015153525229466555,
+ "loss": 1.6821,
+ "step": 982
+ },
+ {
+ "epoch": 0.9832765465287112,
+ "grad_norm": 0.7375655174255371,
+ "learning_rate": 0.00015144539160885613,
+ "loss": 1.7568,
+ "step": 983
+ },
+ {
+ "epoch": 0.984276827857835,
+ "grad_norm": 0.7466885447502136,
+ "learning_rate": 0.00015135547439386516,
+ "loss": 1.5805,
+ "step": 984
+ },
+ {
+ "epoch": 0.9852771091869589,
+ "grad_norm": 0.6645593047142029,
+ "learning_rate": 0.0001512655007484955,
+ "loss": 1.6776,
+ "step": 985
+ },
+ {
+ "epoch": 0.9862773905160827,
+ "grad_norm": 0.7973874807357788,
+ "learning_rate": 0.00015117547077161185,
+ "loss": 1.4931,
+ "step": 986
+ },
+ {
+ "epoch": 0.9872776718452064,
+ "grad_norm": 0.685391902923584,
+ "learning_rate": 0.0001510853845621409,
+ "loss": 1.6254,
+ "step": 987
+ },
+ {
+ "epoch": 0.9882779531743303,
+ "grad_norm": 0.6562414765357971,
+ "learning_rate": 0.00015099524221907107,
+ "loss": 1.6677,
+ "step": 988
+ },
+ {
+ "epoch": 0.9892782345034541,
+ "grad_norm": 0.6216359734535217,
+ "learning_rate": 0.0001509050438414525,
+ "loss": 1.6107,
+ "step": 989
+ },
+ {
+ "epoch": 0.9902785158325779,
+ "grad_norm": 0.7108810544013977,
+ "learning_rate": 0.00015081478952839693,
+ "loss": 1.5268,
+ "step": 990
+ },
+ {
+ "epoch": 0.9912787971617018,
+ "grad_norm": 0.7076026797294617,
+ "learning_rate": 0.00015072447937907753,
+ "loss": 1.3716,
+ "step": 991
+ },
+ {
+ "epoch": 0.9922790784908255,
+ "grad_norm": 0.6056272983551025,
+ "learning_rate": 0.00015063411349272877,
+ "loss": 1.4931,
+ "step": 992
+ },
+ {
+ "epoch": 0.9932793598199494,
+ "grad_norm": 0.726671576499939,
+ "learning_rate": 0.00015054369196864644,
+ "loss": 1.6409,
+ "step": 993
+ },
+ {
+ "epoch": 0.9942796411490732,
+ "grad_norm": 0.7019214630126953,
+ "learning_rate": 0.00015045321490618748,
+ "loss": 1.4476,
+ "step": 994
+ },
+ {
+ "epoch": 0.995279922478197,
+ "grad_norm": 0.755043625831604,
+ "learning_rate": 0.00015036268240476978,
+ "loss": 1.6674,
+ "step": 995
+ },
+ {
+ "epoch": 0.9962802038073209,
+ "grad_norm": 0.7450313568115234,
+ "learning_rate": 0.00015027209456387218,
+ "loss": 1.3706,
+ "step": 996
+ },
+ {
+ "epoch": 0.9972804851364446,
+ "grad_norm": 0.6804680228233337,
+ "learning_rate": 0.00015018145148303438,
+ "loss": 1.3878,
+ "step": 997
+ },
+ {
+ "epoch": 0.9982807664655684,
+ "grad_norm": 0.7353954315185547,
+ "learning_rate": 0.00015009075326185667,
+ "loss": 1.8656,
+ "step": 998
+ },
+ {
+ "epoch": 0.9992810477946923,
+ "grad_norm": 0.7213340401649475,
+ "learning_rate": 0.00015000000000000001,
+ "loss": 1.6031,
+ "step": 999
+ },
+ {
+ "epoch": 1.0002813291238162,
+ "grad_norm": 0.7066403031349182,
+ "learning_rate": 0.00014990919179718584,
+ "loss": 1.3663,
+ "step": 1000
+ },
+ {
+ "epoch": 1.0012816104529398,
+ "grad_norm": 0.6104635000228882,
+ "learning_rate": 0.00014981832875319597,
+ "loss": 1.3155,
+ "step": 1001
+ },
+ {
+ "epoch": 1.0022818917820637,
+ "grad_norm": 0.7524546384811401,
+ "learning_rate": 0.00014972741096787242,
+ "loss": 1.2042,
+ "step": 1002
+ },
+ {
+ "epoch": 1.0032821731111876,
+ "grad_norm": 0.6831395626068115,
+ "learning_rate": 0.0001496364385411174,
+ "loss": 1.3909,
+ "step": 1003
+ },
+ {
+ "epoch": 1.0042824544403113,
+ "grad_norm": 0.6223152875900269,
+ "learning_rate": 0.0001495454115728932,
+ "loss": 1.2693,
+ "step": 1004
+ },
+ {
+ "epoch": 1.0052827357694352,
+ "grad_norm": 0.6630414128303528,
+ "learning_rate": 0.0001494543301632219,
+ "loss": 1.5871,
+ "step": 1005
+ },
+ {
+ "epoch": 1.006283017098559,
+ "grad_norm": 0.6211387515068054,
+ "learning_rate": 0.00014936319441218555,
+ "loss": 1.5096,
+ "step": 1006
+ },
+ {
+ "epoch": 1.0072832984276827,
+ "grad_norm": 0.7009375095367432,
+ "learning_rate": 0.0001492720044199259,
+ "loss": 1.4553,
+ "step": 1007
+ },
+ {
+ "epoch": 1.0082835797568066,
+ "grad_norm": 0.607667088508606,
+ "learning_rate": 0.0001491807602866442,
+ "loss": 1.4655,
+ "step": 1008
+ },
+ {
+ "epoch": 1.0092838610859305,
+ "grad_norm": 0.7168284058570862,
+ "learning_rate": 0.00014908946211260123,
+ "loss": 1.32,
+ "step": 1009
+ },
+ {
+ "epoch": 1.0102841424150542,
+ "grad_norm": 0.6472702622413635,
+ "learning_rate": 0.00014899810999811726,
+ "loss": 1.418,
+ "step": 1010
+ },
+ {
+ "epoch": 1.011284423744178,
+ "grad_norm": 0.6901958584785461,
+ "learning_rate": 0.0001489067040435717,
+ "loss": 1.5842,
+ "step": 1011
+ },
+ {
+ "epoch": 1.012284705073302,
+ "grad_norm": 0.6948314905166626,
+ "learning_rate": 0.00014881524434940313,
+ "loss": 1.3352,
+ "step": 1012
+ },
+ {
+ "epoch": 1.0132849864024256,
+ "grad_norm": 0.6064580082893372,
+ "learning_rate": 0.0001487237310161093,
+ "loss": 1.2467,
+ "step": 1013
+ },
+ {
+ "epoch": 1.0142852677315495,
+ "grad_norm": 0.5783251523971558,
+ "learning_rate": 0.0001486321641442467,
+ "loss": 1.3932,
+ "step": 1014
+ },
+ {
+ "epoch": 1.0152855490606734,
+ "grad_norm": 0.6915367245674133,
+ "learning_rate": 0.00014854054383443081,
+ "loss": 1.5062,
+ "step": 1015
+ },
+ {
+ "epoch": 1.016285830389797,
+ "grad_norm": 0.7143461108207703,
+ "learning_rate": 0.00014844887018733582,
+ "loss": 1.3284,
+ "step": 1016
+ },
+ {
+ "epoch": 1.017286111718921,
+ "grad_norm": 0.7030971050262451,
+ "learning_rate": 0.00014835714330369446,
+ "loss": 1.5919,
+ "step": 1017
+ },
+ {
+ "epoch": 1.0182863930480448,
+ "grad_norm": 0.7102513909339905,
+ "learning_rate": 0.00014826536328429795,
+ "loss": 1.4448,
+ "step": 1018
+ },
+ {
+ "epoch": 1.0192866743771685,
+ "grad_norm": 0.6152640581130981,
+ "learning_rate": 0.000148173530229996,
+ "loss": 1.4771,
+ "step": 1019
+ },
+ {
+ "epoch": 1.0202869557062924,
+ "grad_norm": 0.6302015781402588,
+ "learning_rate": 0.00014808164424169647,
+ "loss": 1.3969,
+ "step": 1020
+ },
+ {
+ "epoch": 1.0212872370354162,
+ "grad_norm": 0.8721572756767273,
+ "learning_rate": 0.0001479897054203655,
+ "loss": 1.3515,
+ "step": 1021
+ },
+ {
+ "epoch": 1.0222875183645401,
+ "grad_norm": 1.096592903137207,
+ "learning_rate": 0.00014789771386702717,
+ "loss": 1.4757,
+ "step": 1022
+ },
+ {
+ "epoch": 1.0232877996936638,
+ "grad_norm": 0.7684335112571716,
+ "learning_rate": 0.0001478056696827636,
+ "loss": 1.2521,
+ "step": 1023
+ },
+ {
+ "epoch": 1.0242880810227877,
+ "grad_norm": 0.6189197301864624,
+ "learning_rate": 0.0001477135729687147,
+ "loss": 1.4304,
+ "step": 1024
+ },
+ {
+ "epoch": 1.0252883623519116,
+ "grad_norm": 0.6061127781867981,
+ "learning_rate": 0.0001476214238260781,
+ "loss": 1.4236,
+ "step": 1025
+ },
+ {
+ "epoch": 1.0262886436810352,
+ "grad_norm": 0.5413788557052612,
+ "learning_rate": 0.000147529222356109,
+ "loss": 1.1392,
+ "step": 1026
+ },
+ {
+ "epoch": 1.0272889250101591,
+ "grad_norm": 0.6879326105117798,
+ "learning_rate": 0.0001474369686601202,
+ "loss": 1.3966,
+ "step": 1027
+ },
+ {
+ "epoch": 1.028289206339283,
+ "grad_norm": 0.817315936088562,
+ "learning_rate": 0.0001473446628394818,
+ "loss": 1.6747,
+ "step": 1028
+ },
+ {
+ "epoch": 1.0292894876684067,
+ "grad_norm": 0.7139183282852173,
+ "learning_rate": 0.00014725230499562119,
+ "loss": 1.5432,
+ "step": 1029
+ },
+ {
+ "epoch": 1.0302897689975306,
+ "grad_norm": 0.7536730766296387,
+ "learning_rate": 0.00014715989523002296,
+ "loss": 1.5839,
+ "step": 1030
+ },
+ {
+ "epoch": 1.0312900503266544,
+ "grad_norm": 0.7000136375427246,
+ "learning_rate": 0.00014706743364422878,
+ "loss": 1.3519,
+ "step": 1031
+ },
+ {
+ "epoch": 1.032290331655778,
+ "grad_norm": 0.6579506993293762,
+ "learning_rate": 0.00014697492033983707,
+ "loss": 1.3622,
+ "step": 1032
+ },
+ {
+ "epoch": 1.033290612984902,
+ "grad_norm": 0.6257238984107971,
+ "learning_rate": 0.00014688235541850337,
+ "loss": 1.4393,
+ "step": 1033
+ },
+ {
+ "epoch": 1.0342908943140259,
+ "grad_norm": 0.749273955821991,
+ "learning_rate": 0.0001467897389819397,
+ "loss": 1.5201,
+ "step": 1034
+ },
+ {
+ "epoch": 1.0352911756431495,
+ "grad_norm": 0.7008610963821411,
+ "learning_rate": 0.00014669707113191483,
+ "loss": 1.3041,
+ "step": 1035
+ },
+ {
+ "epoch": 1.0362914569722734,
+ "grad_norm": 0.6838043332099915,
+ "learning_rate": 0.0001466043519702539,
+ "loss": 1.435,
+ "step": 1036
+ },
+ {
+ "epoch": 1.0372917383013973,
+ "grad_norm": 0.6197534799575806,
+ "learning_rate": 0.00014651158159883855,
+ "loss": 1.3806,
+ "step": 1037
+ },
+ {
+ "epoch": 1.038292019630521,
+ "grad_norm": 0.6906173825263977,
+ "learning_rate": 0.0001464187601196066,
+ "loss": 1.3898,
+ "step": 1038
+ },
+ {
+ "epoch": 1.0392923009596449,
+ "grad_norm": 0.5627701282501221,
+ "learning_rate": 0.00014632588763455212,
+ "loss": 1.3949,
+ "step": 1039
+ },
+ {
+ "epoch": 1.0402925822887688,
+ "grad_norm": 0.6588866710662842,
+ "learning_rate": 0.00014623296424572517,
+ "loss": 1.4041,
+ "step": 1040
+ },
+ {
+ "epoch": 1.0412928636178926,
+ "grad_norm": 0.7941678762435913,
+ "learning_rate": 0.00014613999005523174,
+ "loss": 1.429,
+ "step": 1041
+ },
+ {
+ "epoch": 1.0422931449470163,
+ "grad_norm": 0.5834561586380005,
+ "learning_rate": 0.00014604696516523361,
+ "loss": 1.4007,
+ "step": 1042
+ },
+ {
+ "epoch": 1.0432934262761402,
+ "grad_norm": 0.5992164015769958,
+ "learning_rate": 0.00014595388967794835,
+ "loss": 1.4029,
+ "step": 1043
+ },
+ {
+ "epoch": 1.044293707605264,
+ "grad_norm": 0.6714745759963989,
+ "learning_rate": 0.00014586076369564908,
+ "loss": 1.4421,
+ "step": 1044
+ },
+ {
+ "epoch": 1.0452939889343877,
+ "grad_norm": 0.6675744652748108,
+ "learning_rate": 0.00014576758732066442,
+ "loss": 1.4663,
+ "step": 1045
+ },
+ {
+ "epoch": 1.0462942702635116,
+ "grad_norm": 0.6605483293533325,
+ "learning_rate": 0.00014567436065537835,
+ "loss": 1.3919,
+ "step": 1046
+ },
+ {
+ "epoch": 1.0472945515926355,
+ "grad_norm": 0.6836503744125366,
+ "learning_rate": 0.00014558108380223012,
+ "loss": 1.3428,
+ "step": 1047
+ },
+ {
+ "epoch": 1.0482948329217592,
+ "grad_norm": 0.6451092958450317,
+ "learning_rate": 0.00014548775686371412,
+ "loss": 1.3717,
+ "step": 1048
+ },
+ {
+ "epoch": 1.049295114250883,
+ "grad_norm": 0.6579246520996094,
+ "learning_rate": 0.00014539437994237977,
+ "loss": 1.7364,
+ "step": 1049
+ },
+ {
+ "epoch": 1.050295395580007,
+ "grad_norm": 0.625912070274353,
+ "learning_rate": 0.00014530095314083143,
+ "loss": 1.5574,
+ "step": 1050
+ },
+ {
+ "epoch": 1.0512956769091306,
+ "grad_norm": 0.7133544087409973,
+ "learning_rate": 0.00014520747656172824,
+ "loss": 1.6031,
+ "step": 1051
+ },
+ {
+ "epoch": 1.0522959582382545,
+ "grad_norm": 0.6956666111946106,
+ "learning_rate": 0.00014511395030778406,
+ "loss": 1.6075,
+ "step": 1052
+ },
+ {
+ "epoch": 1.0532962395673784,
+ "grad_norm": 0.7082141041755676,
+ "learning_rate": 0.00014502037448176734,
+ "loss": 1.3839,
+ "step": 1053
+ },
+ {
+ "epoch": 1.054296520896502,
+ "grad_norm": 0.696561872959137,
+ "learning_rate": 0.000144926749186501,
+ "loss": 1.6738,
+ "step": 1054
+ },
+ {
+ "epoch": 1.055296802225626,
+ "grad_norm": 0.6995558142662048,
+ "learning_rate": 0.00014483307452486227,
+ "loss": 1.4732,
+ "step": 1055
+ },
+ {
+ "epoch": 1.0562970835547498,
+ "grad_norm": 0.7434210181236267,
+ "learning_rate": 0.0001447393505997827,
+ "loss": 1.4207,
+ "step": 1056
+ },
+ {
+ "epoch": 1.0572973648838735,
+ "grad_norm": 0.6679419279098511,
+ "learning_rate": 0.00014464557751424793,
+ "loss": 1.397,
+ "step": 1057
+ },
+ {
+ "epoch": 1.0582976462129974,
+ "grad_norm": 0.6747702360153198,
+ "learning_rate": 0.00014455175537129758,
+ "loss": 1.5247,
+ "step": 1058
+ },
+ {
+ "epoch": 1.0592979275421213,
+ "grad_norm": 0.6184663772583008,
+ "learning_rate": 0.00014445788427402528,
+ "loss": 1.2086,
+ "step": 1059
+ },
+ {
+ "epoch": 1.0602982088712452,
+ "grad_norm": 0.6546644568443298,
+ "learning_rate": 0.00014436396432557835,
+ "loss": 1.3795,
+ "step": 1060
+ },
+ {
+ "epoch": 1.0612984902003688,
+ "grad_norm": 0.6418478488922119,
+ "learning_rate": 0.00014426999562915782,
+ "loss": 1.3997,
+ "step": 1061
+ },
+ {
+ "epoch": 1.0622987715294927,
+ "grad_norm": 0.6456977725028992,
+ "learning_rate": 0.00014417597828801832,
+ "loss": 1.347,
+ "step": 1062
+ },
+ {
+ "epoch": 1.0632990528586166,
+ "grad_norm": 0.7379586696624756,
+ "learning_rate": 0.0001440819124054679,
+ "loss": 1.4168,
+ "step": 1063
+ },
+ {
+ "epoch": 1.0642993341877403,
+ "grad_norm": 0.583483099937439,
+ "learning_rate": 0.00014398779808486793,
+ "loss": 1.3724,
+ "step": 1064
+ },
+ {
+ "epoch": 1.0652996155168641,
+ "grad_norm": 0.8881146311759949,
+ "learning_rate": 0.00014389363542963306,
+ "loss": 1.2834,
+ "step": 1065
+ },
+ {
+ "epoch": 1.066299896845988,
+ "grad_norm": 0.6458824276924133,
+ "learning_rate": 0.000143799424543231,
+ "loss": 1.2557,
+ "step": 1066
+ },
+ {
+ "epoch": 1.0673001781751117,
+ "grad_norm": 0.8149404525756836,
+ "learning_rate": 0.0001437051655291825,
+ "loss": 1.5179,
+ "step": 1067
+ },
+ {
+ "epoch": 1.0683004595042356,
+ "grad_norm": 0.8752502202987671,
+ "learning_rate": 0.0001436108584910611,
+ "loss": 1.3922,
+ "step": 1068
+ },
+ {
+ "epoch": 1.0693007408333595,
+ "grad_norm": 0.6741296648979187,
+ "learning_rate": 0.0001435165035324933,
+ "loss": 1.464,
+ "step": 1069
+ },
+ {
+ "epoch": 1.0703010221624831,
+ "grad_norm": 0.6555476784706116,
+ "learning_rate": 0.000143422100757158,
+ "loss": 1.3172,
+ "step": 1070
+ },
+ {
+ "epoch": 1.071301303491607,
+ "grad_norm": 0.660168468952179,
+ "learning_rate": 0.00014332765026878687,
+ "loss": 1.3089,
+ "step": 1071
+ },
+ {
+ "epoch": 1.072301584820731,
+ "grad_norm": 0.8213777542114258,
+ "learning_rate": 0.0001432331521711639,
+ "loss": 1.4487,
+ "step": 1072
+ },
+ {
+ "epoch": 1.0733018661498546,
+ "grad_norm": 0.6642137765884399,
+ "learning_rate": 0.00014313860656812536,
+ "loss": 1.1624,
+ "step": 1073
+ },
+ {
+ "epoch": 1.0743021474789785,
+ "grad_norm": 0.6304247975349426,
+ "learning_rate": 0.00014304401356355983,
+ "loss": 1.2725,
+ "step": 1074
+ },
+ {
+ "epoch": 1.0753024288081023,
+ "grad_norm": 0.6976219415664673,
+ "learning_rate": 0.00014294937326140788,
+ "loss": 1.6664,
+ "step": 1075
+ },
+ {
+ "epoch": 1.076302710137226,
+ "grad_norm": 0.6528605818748474,
+ "learning_rate": 0.00014285468576566207,
+ "loss": 1.3489,
+ "step": 1076
+ },
+ {
+ "epoch": 1.07730299146635,
+ "grad_norm": 0.7203120589256287,
+ "learning_rate": 0.00014275995118036693,
+ "loss": 1.4319,
+ "step": 1077
+ },
+ {
+ "epoch": 1.0783032727954738,
+ "grad_norm": 0.8259358406066895,
+ "learning_rate": 0.00014266516960961852,
+ "loss": 1.3707,
+ "step": 1078
+ },
+ {
+ "epoch": 1.0793035541245974,
+ "grad_norm": 0.9485010504722595,
+ "learning_rate": 0.00014257034115756472,
+ "loss": 1.6787,
+ "step": 1079
+ },
+ {
+ "epoch": 1.0803038354537213,
+ "grad_norm": 0.6732786893844604,
+ "learning_rate": 0.0001424754659284048,
+ "loss": 1.4184,
+ "step": 1080
+ },
+ {
+ "epoch": 1.0813041167828452,
+ "grad_norm": 0.6673377752304077,
+ "learning_rate": 0.0001423805440263895,
+ "loss": 1.5084,
+ "step": 1081
+ },
+ {
+ "epoch": 1.0823043981119689,
+ "grad_norm": 0.6682411432266235,
+ "learning_rate": 0.0001422855755558208,
+ "loss": 1.4034,
+ "step": 1082
+ },
+ {
+ "epoch": 1.0833046794410928,
+ "grad_norm": 0.6940018534660339,
+ "learning_rate": 0.00014219056062105193,
+ "loss": 1.6816,
+ "step": 1083
+ },
+ {
+ "epoch": 1.0843049607702167,
+ "grad_norm": 0.7052391767501831,
+ "learning_rate": 0.0001420954993264871,
+ "loss": 1.5849,
+ "step": 1084
+ },
+ {
+ "epoch": 1.0853052420993405,
+ "grad_norm": 0.7090102434158325,
+ "learning_rate": 0.00014200039177658145,
+ "loss": 1.2906,
+ "step": 1085
+ },
+ {
+ "epoch": 1.0863055234284642,
+ "grad_norm": 0.7664905190467834,
+ "learning_rate": 0.000141905238075841,
+ "loss": 1.5504,
+ "step": 1086
+ },
+ {
+ "epoch": 1.087305804757588,
+ "grad_norm": 0.6557911038398743,
+ "learning_rate": 0.00014181003832882248,
+ "loss": 1.5846,
+ "step": 1087
+ },
+ {
+ "epoch": 1.088306086086712,
+ "grad_norm": 0.6342834234237671,
+ "learning_rate": 0.00014171479264013311,
+ "loss": 1.2405,
+ "step": 1088
+ },
+ {
+ "epoch": 1.0893063674158356,
+ "grad_norm": 0.7152488827705383,
+ "learning_rate": 0.00014161950111443077,
+ "loss": 1.5047,
+ "step": 1089
+ },
+ {
+ "epoch": 1.0903066487449595,
+ "grad_norm": 0.6031161546707153,
+ "learning_rate": 0.00014152416385642357,
+ "loss": 1.3203,
+ "step": 1090
+ },
+ {
+ "epoch": 1.0913069300740834,
+ "grad_norm": 0.6475042700767517,
+ "learning_rate": 0.00014142878097086995,
+ "loss": 1.191,
+ "step": 1091
+ },
+ {
+ "epoch": 1.092307211403207,
+ "grad_norm": 0.7956790924072266,
+ "learning_rate": 0.0001413333525625784,
+ "loss": 1.3601,
+ "step": 1092
+ },
+ {
+ "epoch": 1.093307492732331,
+ "grad_norm": 0.6703265309333801,
+ "learning_rate": 0.00014123787873640754,
+ "loss": 1.374,
+ "step": 1093
+ },
+ {
+ "epoch": 1.0943077740614549,
+ "grad_norm": 0.7583750486373901,
+ "learning_rate": 0.00014114235959726575,
+ "loss": 1.3064,
+ "step": 1094
+ },
+ {
+ "epoch": 1.0953080553905785,
+ "grad_norm": 0.6749271154403687,
+ "learning_rate": 0.0001410467952501114,
+ "loss": 1.4501,
+ "step": 1095
+ },
+ {
+ "epoch": 1.0963083367197024,
+ "grad_norm": 0.6708521842956543,
+ "learning_rate": 0.00014095118579995235,
+ "loss": 1.5046,
+ "step": 1096
+ },
+ {
+ "epoch": 1.0973086180488263,
+ "grad_norm": 0.5871726870536804,
+ "learning_rate": 0.0001408555313518461,
+ "loss": 1.3549,
+ "step": 1097
+ },
+ {
+ "epoch": 1.09830889937795,
+ "grad_norm": 0.6886669397354126,
+ "learning_rate": 0.00014075983201089964,
+ "loss": 1.3131,
+ "step": 1098
+ },
+ {
+ "epoch": 1.0993091807070738,
+ "grad_norm": 0.6243886351585388,
+ "learning_rate": 0.0001406640878822692,
+ "loss": 1.2278,
+ "step": 1099
+ },
+ {
+ "epoch": 1.1003094620361977,
+ "grad_norm": 0.7198624610900879,
+ "learning_rate": 0.00014056829907116024,
+ "loss": 1.4459,
+ "step": 1100
+ },
+ {
+ "epoch": 1.1013097433653214,
+ "grad_norm": 0.8059262037277222,
+ "learning_rate": 0.00014047246568282736,
+ "loss": 1.473,
+ "step": 1101
+ },
+ {
+ "epoch": 1.1023100246944453,
+ "grad_norm": 0.8409417271614075,
+ "learning_rate": 0.00014037658782257414,
+ "loss": 1.6688,
+ "step": 1102
+ },
+ {
+ "epoch": 1.1033103060235692,
+ "grad_norm": 0.739276111125946,
+ "learning_rate": 0.00014028066559575302,
+ "loss": 1.6182,
+ "step": 1103
+ },
+ {
+ "epoch": 1.104310587352693,
+ "grad_norm": 0.6282714009284973,
+ "learning_rate": 0.00014018469910776513,
+ "loss": 1.2835,
+ "step": 1104
+ },
+ {
+ "epoch": 1.1053108686818167,
+ "grad_norm": 0.7133497595787048,
+ "learning_rate": 0.0001400886884640603,
+ "loss": 1.5798,
+ "step": 1105
+ },
+ {
+ "epoch": 1.1063111500109406,
+ "grad_norm": 0.6376346945762634,
+ "learning_rate": 0.00013999263377013693,
+ "loss": 1.1436,
+ "step": 1106
+ },
+ {
+ "epoch": 1.1073114313400645,
+ "grad_norm": 0.5934734344482422,
+ "learning_rate": 0.00013989653513154165,
+ "loss": 1.3204,
+ "step": 1107
+ },
+ {
+ "epoch": 1.1083117126691882,
+ "grad_norm": 0.6655352115631104,
+ "learning_rate": 0.00013980039265386955,
+ "loss": 1.4602,
+ "step": 1108
+ },
+ {
+ "epoch": 1.109311993998312,
+ "grad_norm": 0.7147901058197021,
+ "learning_rate": 0.00013970420644276383,
+ "loss": 1.4124,
+ "step": 1109
+ },
+ {
+ "epoch": 1.110312275327436,
+ "grad_norm": 0.7845139503479004,
+ "learning_rate": 0.0001396079766039157,
+ "loss": 1.5831,
+ "step": 1110
+ },
+ {
+ "epoch": 1.1113125566565596,
+ "grad_norm": 0.8100587129592896,
+ "learning_rate": 0.00013951170324306435,
+ "loss": 1.6218,
+ "step": 1111
+ },
+ {
+ "epoch": 1.1123128379856835,
+ "grad_norm": 0.689988374710083,
+ "learning_rate": 0.00013941538646599687,
+ "loss": 1.2396,
+ "step": 1112
+ },
+ {
+ "epoch": 1.1133131193148074,
+ "grad_norm": 0.6771540641784668,
+ "learning_rate": 0.0001393190263785479,
+ "loss": 1.3739,
+ "step": 1113
+ },
+ {
+ "epoch": 1.114313400643931,
+ "grad_norm": 0.6424306631088257,
+ "learning_rate": 0.0001392226230865998,
+ "loss": 1.1653,
+ "step": 1114
+ },
+ {
+ "epoch": 1.115313681973055,
+ "grad_norm": 0.6135202646255493,
+ "learning_rate": 0.0001391261766960823,
+ "loss": 1.1924,
+ "step": 1115
+ },
+ {
+ "epoch": 1.1163139633021788,
+ "grad_norm": 0.6751917004585266,
+ "learning_rate": 0.00013902968731297255,
+ "loss": 1.4491,
+ "step": 1116
+ },
+ {
+ "epoch": 1.1173142446313025,
+ "grad_norm": 0.875303328037262,
+ "learning_rate": 0.00013893315504329498,
+ "loss": 1.3918,
+ "step": 1117
+ },
+ {
+ "epoch": 1.1183145259604264,
+ "grad_norm": 0.7102020978927612,
+ "learning_rate": 0.00013883657999312109,
+ "loss": 1.463,
+ "step": 1118
+ },
+ {
+ "epoch": 1.1193148072895502,
+ "grad_norm": 0.6863378882408142,
+ "learning_rate": 0.00013873996226856933,
+ "loss": 1.3958,
+ "step": 1119
+ },
+ {
+ "epoch": 1.120315088618674,
+ "grad_norm": 0.6769587397575378,
+ "learning_rate": 0.00013864330197580513,
+ "loss": 1.3044,
+ "step": 1120
+ },
+ {
+ "epoch": 1.1213153699477978,
+ "grad_norm": 0.7217769026756287,
+ "learning_rate": 0.0001385465992210407,
+ "loss": 1.6125,
+ "step": 1121
+ },
+ {
+ "epoch": 1.1223156512769217,
+ "grad_norm": 0.6756213903427124,
+ "learning_rate": 0.00013844985411053492,
+ "loss": 1.3658,
+ "step": 1122
+ },
+ {
+ "epoch": 1.1233159326060456,
+ "grad_norm": 0.7109145522117615,
+ "learning_rate": 0.00013835306675059308,
+ "loss": 1.5698,
+ "step": 1123
+ },
+ {
+ "epoch": 1.1243162139351692,
+ "grad_norm": 0.5903546810150146,
+ "learning_rate": 0.00013825623724756704,
+ "loss": 1.4429,
+ "step": 1124
+ },
+ {
+ "epoch": 1.1253164952642931,
+ "grad_norm": 0.7500163912773132,
+ "learning_rate": 0.00013815936570785487,
+ "loss": 1.2482,
+ "step": 1125
+ },
+ {
+ "epoch": 1.1263167765934168,
+ "grad_norm": 0.6458998918533325,
+ "learning_rate": 0.00013806245223790088,
+ "loss": 1.3496,
+ "step": 1126
+ },
+ {
+ "epoch": 1.1273170579225407,
+ "grad_norm": 0.627657413482666,
+ "learning_rate": 0.0001379654969441955,
+ "loss": 1.4847,
+ "step": 1127
+ },
+ {
+ "epoch": 1.1283173392516646,
+ "grad_norm": 0.7440046072006226,
+ "learning_rate": 0.000137868499933275,
+ "loss": 1.782,
+ "step": 1128
+ },
+ {
+ "epoch": 1.1293176205807884,
+ "grad_norm": 0.6717308163642883,
+ "learning_rate": 0.00013777146131172162,
+ "loss": 1.6345,
+ "step": 1129
+ },
+ {
+ "epoch": 1.130317901909912,
+ "grad_norm": 0.6480956673622131,
+ "learning_rate": 0.00013767438118616318,
+ "loss": 1.2862,
+ "step": 1130
+ },
+ {
+ "epoch": 1.131318183239036,
+ "grad_norm": 0.6778338551521301,
+ "learning_rate": 0.00013757725966327322,
+ "loss": 1.4821,
+ "step": 1131
+ },
+ {
+ "epoch": 1.1323184645681599,
+ "grad_norm": 0.6759636402130127,
+ "learning_rate": 0.00013748009684977073,
+ "loss": 1.5988,
+ "step": 1132
+ },
+ {
+ "epoch": 1.1333187458972835,
+ "grad_norm": 0.674404501914978,
+ "learning_rate": 0.0001373828928524201,
+ "loss": 1.4744,
+ "step": 1133
+ },
+ {
+ "epoch": 1.1343190272264074,
+ "grad_norm": 0.6017488241195679,
+ "learning_rate": 0.00013728564777803088,
+ "loss": 1.6296,
+ "step": 1134
+ },
+ {
+ "epoch": 1.1353193085555313,
+ "grad_norm": 0.6459933519363403,
+ "learning_rate": 0.00013718836173345783,
+ "loss": 1.5347,
+ "step": 1135
+ },
+ {
+ "epoch": 1.136319589884655,
+ "grad_norm": 0.6578681468963623,
+ "learning_rate": 0.00013709103482560078,
+ "loss": 1.4101,
+ "step": 1136
+ },
+ {
+ "epoch": 1.1373198712137789,
+ "grad_norm": 0.5906695127487183,
+ "learning_rate": 0.00013699366716140435,
+ "loss": 1.422,
+ "step": 1137
+ },
+ {
+ "epoch": 1.1383201525429028,
+ "grad_norm": 0.5622004866600037,
+ "learning_rate": 0.00013689625884785798,
+ "loss": 1.2805,
+ "step": 1138
+ },
+ {
+ "epoch": 1.1393204338720264,
+ "grad_norm": 0.7057269811630249,
+ "learning_rate": 0.00013679880999199583,
+ "loss": 1.5009,
+ "step": 1139
+ },
+ {
+ "epoch": 1.1403207152011503,
+ "grad_norm": 0.6655155420303345,
+ "learning_rate": 0.00013670132070089653,
+ "loss": 1.3504,
+ "step": 1140
+ },
+ {
+ "epoch": 1.1413209965302742,
+ "grad_norm": 0.6448667645454407,
+ "learning_rate": 0.00013660379108168324,
+ "loss": 1.4345,
+ "step": 1141
+ },
+ {
+ "epoch": 1.142321277859398,
+ "grad_norm": 0.6696295142173767,
+ "learning_rate": 0.00013650622124152334,
+ "loss": 1.3248,
+ "step": 1142
+ },
+ {
+ "epoch": 1.1433215591885217,
+ "grad_norm": 0.8913035988807678,
+ "learning_rate": 0.0001364086112876284,
+ "loss": 1.3148,
+ "step": 1143
+ },
+ {
+ "epoch": 1.1443218405176456,
+ "grad_norm": 0.6853944659233093,
+ "learning_rate": 0.00013631096132725413,
+ "loss": 1.4361,
+ "step": 1144
+ },
+ {
+ "epoch": 1.1453221218467693,
+ "grad_norm": 0.6286287307739258,
+ "learning_rate": 0.00013621327146770025,
+ "loss": 1.4485,
+ "step": 1145
+ },
+ {
+ "epoch": 1.1463224031758932,
+ "grad_norm": 0.6847277283668518,
+ "learning_rate": 0.00013611554181631013,
+ "loss": 1.4095,
+ "step": 1146
+ },
+ {
+ "epoch": 1.147322684505017,
+ "grad_norm": 0.6514857411384583,
+ "learning_rate": 0.00013601777248047105,
+ "loss": 1.4106,
+ "step": 1147
+ },
+ {
+ "epoch": 1.148322965834141,
+ "grad_norm": 0.6113057732582092,
+ "learning_rate": 0.0001359199635676138,
+ "loss": 1.3483,
+ "step": 1148
+ },
+ {
+ "epoch": 1.1493232471632646,
+ "grad_norm": 0.6366062164306641,
+ "learning_rate": 0.00013582211518521273,
+ "loss": 1.4164,
+ "step": 1149
+ },
+ {
+ "epoch": 1.1503235284923885,
+ "grad_norm": 0.6680336594581604,
+ "learning_rate": 0.00013572422744078551,
+ "loss": 1.5326,
+ "step": 1150
+ },
+ {
+ "epoch": 1.1513238098215124,
+ "grad_norm": 0.6046566367149353,
+ "learning_rate": 0.00013562630044189304,
+ "loss": 1.4139,
+ "step": 1151
+ },
+ {
+ "epoch": 1.152324091150636,
+ "grad_norm": 0.5869380235671997,
+ "learning_rate": 0.00013552833429613938,
+ "loss": 1.2859,
+ "step": 1152
+ },
+ {
+ "epoch": 1.15332437247976,
+ "grad_norm": 0.6923080682754517,
+ "learning_rate": 0.0001354303291111716,
+ "loss": 1.3419,
+ "step": 1153
+ },
+ {
+ "epoch": 1.1543246538088838,
+ "grad_norm": 0.698279082775116,
+ "learning_rate": 0.0001353322849946797,
+ "loss": 1.4796,
+ "step": 1154
+ },
+ {
+ "epoch": 1.1553249351380075,
+ "grad_norm": 0.6980450749397278,
+ "learning_rate": 0.00013523420205439646,
+ "loss": 1.5941,
+ "step": 1155
+ },
+ {
+ "epoch": 1.1563252164671314,
+ "grad_norm": 0.7222338914871216,
+ "learning_rate": 0.0001351360803980972,
+ "loss": 1.5019,
+ "step": 1156
+ },
+ {
+ "epoch": 1.1573254977962553,
+ "grad_norm": 0.6446929574012756,
+ "learning_rate": 0.00013503792013359997,
+ "loss": 1.2706,
+ "step": 1157
+ },
+ {
+ "epoch": 1.158325779125379,
+ "grad_norm": 0.699488639831543,
+ "learning_rate": 0.00013493972136876509,
+ "loss": 1.5775,
+ "step": 1158
+ },
+ {
+ "epoch": 1.1593260604545028,
+ "grad_norm": 0.6865110397338867,
+ "learning_rate": 0.00013484148421149527,
+ "loss": 1.5015,
+ "step": 1159
+ },
+ {
+ "epoch": 1.1603263417836267,
+ "grad_norm": 0.800570547580719,
+ "learning_rate": 0.0001347432087697354,
+ "loss": 1.401,
+ "step": 1160
+ },
+ {
+ "epoch": 1.1613266231127504,
+ "grad_norm": 0.706388533115387,
+ "learning_rate": 0.00013464489515147238,
+ "loss": 1.2895,
+ "step": 1161
+ },
+ {
+ "epoch": 1.1623269044418743,
+ "grad_norm": 0.7967466711997986,
+ "learning_rate": 0.0001345465434647351,
+ "loss": 1.848,
+ "step": 1162
+ },
+ {
+ "epoch": 1.1633271857709981,
+ "grad_norm": 0.7130827903747559,
+ "learning_rate": 0.00013444815381759425,
+ "loss": 1.2899,
+ "step": 1163
+ },
+ {
+ "epoch": 1.1643274671001218,
+ "grad_norm": 0.6437693238258362,
+ "learning_rate": 0.00013434972631816235,
+ "loss": 1.4456,
+ "step": 1164
+ },
+ {
+ "epoch": 1.1653277484292457,
+ "grad_norm": 0.6305271983146667,
+ "learning_rate": 0.0001342512610745933,
+ "loss": 1.3375,
+ "step": 1165
+ },
+ {
+ "epoch": 1.1663280297583696,
+ "grad_norm": 0.6622384786605835,
+ "learning_rate": 0.0001341527581950827,
+ "loss": 1.6379,
+ "step": 1166
+ },
+ {
+ "epoch": 1.1673283110874935,
+ "grad_norm": 0.64511638879776,
+ "learning_rate": 0.00013405421778786737,
+ "loss": 1.2818,
+ "step": 1167
+ },
+ {
+ "epoch": 1.1683285924166171,
+ "grad_norm": 0.6575477719306946,
+ "learning_rate": 0.00013395563996122537,
+ "loss": 1.2264,
+ "step": 1168
+ },
+ {
+ "epoch": 1.169328873745741,
+ "grad_norm": 0.787896990776062,
+ "learning_rate": 0.00013385702482347593,
+ "loss": 1.6519,
+ "step": 1169
+ },
+ {
+ "epoch": 1.170329155074865,
+ "grad_norm": 0.7512592077255249,
+ "learning_rate": 0.00013375837248297926,
+ "loss": 1.4776,
+ "step": 1170
+ },
+ {
+ "epoch": 1.1713294364039886,
+ "grad_norm": 0.9541054964065552,
+ "learning_rate": 0.0001336596830481364,
+ "loss": 1.5383,
+ "step": 1171
+ },
+ {
+ "epoch": 1.1723297177331125,
+ "grad_norm": 0.7181218266487122,
+ "learning_rate": 0.0001335609566273892,
+ "loss": 1.4088,
+ "step": 1172
+ },
+ {
+ "epoch": 1.1733299990622363,
+ "grad_norm": 0.7449761629104614,
+ "learning_rate": 0.00013346219332922016,
+ "loss": 1.2313,
+ "step": 1173
+ },
+ {
+ "epoch": 1.17433028039136,
+ "grad_norm": 0.7783718705177307,
+ "learning_rate": 0.00013336339326215228,
+ "loss": 1.4114,
+ "step": 1174
+ },
+ {
+ "epoch": 1.175330561720484,
+ "grad_norm": 0.8479213714599609,
+ "learning_rate": 0.00013326455653474897,
+ "loss": 1.6336,
+ "step": 1175
+ },
+ {
+ "epoch": 1.1763308430496078,
+ "grad_norm": 0.7903116941452026,
+ "learning_rate": 0.00013316568325561393,
+ "loss": 1.3251,
+ "step": 1176
+ },
+ {
+ "epoch": 1.1773311243787314,
+ "grad_norm": 0.7016749978065491,
+ "learning_rate": 0.00013306677353339098,
+ "loss": 1.5013,
+ "step": 1177
+ },
+ {
+ "epoch": 1.1783314057078553,
+ "grad_norm": 0.6183115839958191,
+ "learning_rate": 0.000132967827476764,
+ "loss": 1.3984,
+ "step": 1178
+ },
+ {
+ "epoch": 1.1793316870369792,
+ "grad_norm": 0.6227801442146301,
+ "learning_rate": 0.0001328688451944569,
+ "loss": 1.2306,
+ "step": 1179
+ },
+ {
+ "epoch": 1.1803319683661029,
+ "grad_norm": 0.7611119151115417,
+ "learning_rate": 0.00013276982679523322,
+ "loss": 1.3648,
+ "step": 1180
+ },
+ {
+ "epoch": 1.1813322496952268,
+ "grad_norm": 0.6929368376731873,
+ "learning_rate": 0.00013267077238789633,
+ "loss": 1.5107,
+ "step": 1181
+ },
+ {
+ "epoch": 1.1823325310243507,
+ "grad_norm": 0.6637099385261536,
+ "learning_rate": 0.00013257168208128908,
+ "loss": 1.5103,
+ "step": 1182
+ },
+ {
+ "epoch": 1.1833328123534743,
+ "grad_norm": 0.6320933103561401,
+ "learning_rate": 0.00013247255598429378,
+ "loss": 1.4024,
+ "step": 1183
+ },
+ {
+ "epoch": 1.1843330936825982,
+ "grad_norm": 0.8260888457298279,
+ "learning_rate": 0.00013237339420583212,
+ "loss": 1.4385,
+ "step": 1184
+ },
+ {
+ "epoch": 1.185333375011722,
+ "grad_norm": 0.7872930765151978,
+ "learning_rate": 0.00013227419685486492,
+ "loss": 1.2566,
+ "step": 1185
+ },
+ {
+ "epoch": 1.186333656340846,
+ "grad_norm": 0.6857215762138367,
+ "learning_rate": 0.00013217496404039218,
+ "loss": 1.619,
+ "step": 1186
+ },
+ {
+ "epoch": 1.1873339376699696,
+ "grad_norm": 0.708858072757721,
+ "learning_rate": 0.0001320756958714528,
+ "loss": 1.2228,
+ "step": 1187
+ },
+ {
+ "epoch": 1.1883342189990935,
+ "grad_norm": 0.6442694067955017,
+ "learning_rate": 0.00013197639245712454,
+ "loss": 1.5542,
+ "step": 1188
+ },
+ {
+ "epoch": 1.1893345003282172,
+ "grad_norm": 0.5615749359130859,
+ "learning_rate": 0.00013187705390652388,
+ "loss": 1.5289,
+ "step": 1189
+ },
+ {
+ "epoch": 1.190334781657341,
+ "grad_norm": 0.6250069737434387,
+ "learning_rate": 0.00013177768032880593,
+ "loss": 1.3849,
+ "step": 1190
+ },
+ {
+ "epoch": 1.191335062986465,
+ "grad_norm": 0.6521658301353455,
+ "learning_rate": 0.0001316782718331643,
+ "loss": 1.4118,
+ "step": 1191
+ },
+ {
+ "epoch": 1.1923353443155889,
+ "grad_norm": 0.7188206315040588,
+ "learning_rate": 0.0001315788285288309,
+ "loss": 1.4436,
+ "step": 1192
+ },
+ {
+ "epoch": 1.1933356256447125,
+ "grad_norm": 0.7098423838615417,
+ "learning_rate": 0.00013147935052507597,
+ "loss": 1.3339,
+ "step": 1193
+ },
+ {
+ "epoch": 1.1943359069738364,
+ "grad_norm": 0.655750036239624,
+ "learning_rate": 0.00013137983793120786,
+ "loss": 1.4208,
+ "step": 1194
+ },
+ {
+ "epoch": 1.1953361883029603,
+ "grad_norm": 0.640650749206543,
+ "learning_rate": 0.0001312802908565729,
+ "loss": 1.7209,
+ "step": 1195
+ },
+ {
+ "epoch": 1.196336469632084,
+ "grad_norm": 0.601091206073761,
+ "learning_rate": 0.0001311807094105553,
+ "loss": 1.4339,
+ "step": 1196
+ },
+ {
+ "epoch": 1.1973367509612078,
+ "grad_norm": 0.5894292593002319,
+ "learning_rate": 0.00013108109370257712,
+ "loss": 1.4687,
+ "step": 1197
+ },
+ {
+ "epoch": 1.1983370322903317,
+ "grad_norm": 0.7012053728103638,
+ "learning_rate": 0.00013098144384209796,
+ "loss": 1.5834,
+ "step": 1198
+ },
+ {
+ "epoch": 1.1993373136194554,
+ "grad_norm": 0.636356770992279,
+ "learning_rate": 0.000130881759938615,
+ "loss": 1.1723,
+ "step": 1199
+ },
+ {
+ "epoch": 1.2003375949485793,
+ "grad_norm": 0.6913763284683228,
+ "learning_rate": 0.00013078204210166278,
+ "loss": 1.3327,
+ "step": 1200
+ },
+ {
+ "epoch": 1.2013378762777032,
+ "grad_norm": 0.7067025303840637,
+ "learning_rate": 0.00013068229044081324,
+ "loss": 1.4671,
+ "step": 1201
+ },
+ {
+ "epoch": 1.2023381576068268,
+ "grad_norm": 0.698302149772644,
+ "learning_rate": 0.0001305825050656754,
+ "loss": 1.601,
+ "step": 1202
+ },
+ {
+ "epoch": 1.2033384389359507,
+ "grad_norm": 0.6283687949180603,
+ "learning_rate": 0.00013048268608589533,
+ "loss": 1.4164,
+ "step": 1203
+ },
+ {
+ "epoch": 1.2043387202650746,
+ "grad_norm": 0.6440062522888184,
+ "learning_rate": 0.00013038283361115603,
+ "loss": 1.3725,
+ "step": 1204
+ },
+ {
+ "epoch": 1.2053390015941985,
+ "grad_norm": 0.726294994354248,
+ "learning_rate": 0.0001302829477511773,
+ "loss": 1.4557,
+ "step": 1205
+ },
+ {
+ "epoch": 1.2063392829233222,
+ "grad_norm": 0.5984981060028076,
+ "learning_rate": 0.0001301830286157157,
+ "loss": 1.3455,
+ "step": 1206
+ },
+ {
+ "epoch": 1.207339564252446,
+ "grad_norm": 0.6096123456954956,
+ "learning_rate": 0.0001300830763145642,
+ "loss": 1.3562,
+ "step": 1207
+ },
+ {
+ "epoch": 1.2083398455815697,
+ "grad_norm": 0.815022349357605,
+ "learning_rate": 0.00012998309095755235,
+ "loss": 1.5287,
+ "step": 1208
+ },
+ {
+ "epoch": 1.2093401269106936,
+ "grad_norm": 0.990715742111206,
+ "learning_rate": 0.00012988307265454597,
+ "loss": 1.4186,
+ "step": 1209
+ },
+ {
+ "epoch": 1.2103404082398175,
+ "grad_norm": 0.8175992369651794,
+ "learning_rate": 0.0001297830215154471,
+ "loss": 1.3145,
+ "step": 1210
+ },
+ {
+ "epoch": 1.2113406895689414,
+ "grad_norm": 0.734703779220581,
+ "learning_rate": 0.00012968293765019384,
+ "loss": 1.359,
+ "step": 1211
+ },
+ {
+ "epoch": 1.212340970898065,
+ "grad_norm": 0.7687499523162842,
+ "learning_rate": 0.00012958282116876026,
+ "loss": 1.517,
+ "step": 1212
+ },
+ {
+ "epoch": 1.213341252227189,
+ "grad_norm": 0.5819374918937683,
+ "learning_rate": 0.00012948267218115624,
+ "loss": 1.353,
+ "step": 1213
+ },
+ {
+ "epoch": 1.2143415335563128,
+ "grad_norm": 0.6273573040962219,
+ "learning_rate": 0.00012938249079742743,
+ "loss": 1.3114,
+ "step": 1214
+ },
+ {
+ "epoch": 1.2153418148854365,
+ "grad_norm": 0.8144434094429016,
+ "learning_rate": 0.00012928227712765504,
+ "loss": 1.4763,
+ "step": 1215
+ },
+ {
+ "epoch": 1.2163420962145604,
+ "grad_norm": 0.6941531300544739,
+ "learning_rate": 0.0001291820312819558,
+ "loss": 1.3386,
+ "step": 1216
+ },
+ {
+ "epoch": 1.2173423775436842,
+ "grad_norm": 0.6548559069633484,
+ "learning_rate": 0.00012908175337048174,
+ "loss": 1.4763,
+ "step": 1217
+ },
+ {
+ "epoch": 1.218342658872808,
+ "grad_norm": 0.6587492227554321,
+ "learning_rate": 0.00012898144350342015,
+ "loss": 1.5342,
+ "step": 1218
+ },
+ {
+ "epoch": 1.2193429402019318,
+ "grad_norm": 0.6187465190887451,
+ "learning_rate": 0.0001288811017909934,
+ "loss": 1.5109,
+ "step": 1219
+ },
+ {
+ "epoch": 1.2203432215310557,
+ "grad_norm": 0.6152673959732056,
+ "learning_rate": 0.00012878072834345895,
+ "loss": 1.2812,
+ "step": 1220
+ },
+ {
+ "epoch": 1.2213435028601793,
+ "grad_norm": 0.6489769816398621,
+ "learning_rate": 0.00012868032327110904,
+ "loss": 1.4179,
+ "step": 1221
+ },
+ {
+ "epoch": 1.2223437841893032,
+ "grad_norm": 0.6900584697723389,
+ "learning_rate": 0.00012857988668427066,
+ "loss": 1.5862,
+ "step": 1222
+ },
+ {
+ "epoch": 1.2233440655184271,
+ "grad_norm": 0.7858991026878357,
+ "learning_rate": 0.0001284794186933055,
+ "loss": 1.5595,
+ "step": 1223
+ },
+ {
+ "epoch": 1.2243443468475508,
+ "grad_norm": 0.6736376285552979,
+ "learning_rate": 0.00012837891940860972,
+ "loss": 1.6053,
+ "step": 1224
+ },
+ {
+ "epoch": 1.2253446281766747,
+ "grad_norm": 0.6328126192092896,
+ "learning_rate": 0.00012827838894061377,
+ "loss": 1.5157,
+ "step": 1225
+ },
+ {
+ "epoch": 1.2263449095057986,
+ "grad_norm": 0.6681983470916748,
+ "learning_rate": 0.00012817782739978255,
+ "loss": 1.3832,
+ "step": 1226
+ },
+ {
+ "epoch": 1.2273451908349222,
+ "grad_norm": 0.6474846601486206,
+ "learning_rate": 0.00012807723489661495,
+ "loss": 1.4772,
+ "step": 1227
+ },
+ {
+ "epoch": 1.228345472164046,
+ "grad_norm": 0.7181409597396851,
+ "learning_rate": 0.00012797661154164395,
+ "loss": 1.4942,
+ "step": 1228
+ },
+ {
+ "epoch": 1.22934575349317,
+ "grad_norm": 0.8670255541801453,
+ "learning_rate": 0.00012787595744543647,
+ "loss": 1.3818,
+ "step": 1229
+ },
+ {
+ "epoch": 1.2303460348222939,
+ "grad_norm": 0.7007678747177124,
+ "learning_rate": 0.00012777527271859307,
+ "loss": 1.4428,
+ "step": 1230
+ },
+ {
+ "epoch": 1.2313463161514175,
+ "grad_norm": 0.5932227373123169,
+ "learning_rate": 0.0001276745574717481,
+ "loss": 1.1946,
+ "step": 1231
+ },
+ {
+ "epoch": 1.2323465974805414,
+ "grad_norm": 0.6067792177200317,
+ "learning_rate": 0.00012757381181556943,
+ "loss": 1.1963,
+ "step": 1232
+ },
+ {
+ "epoch": 1.2333468788096653,
+ "grad_norm": 0.6702238917350769,
+ "learning_rate": 0.0001274730358607583,
+ "loss": 1.4489,
+ "step": 1233
+ },
+ {
+ "epoch": 1.234347160138789,
+ "grad_norm": 0.6660708785057068,
+ "learning_rate": 0.00012737222971804924,
+ "loss": 1.4307,
+ "step": 1234
+ },
+ {
+ "epoch": 1.2353474414679129,
+ "grad_norm": 0.6711790561676025,
+ "learning_rate": 0.00012727139349821,
+ "loss": 1.4167,
+ "step": 1235
+ },
+ {
+ "epoch": 1.2363477227970368,
+ "grad_norm": 0.7145324349403381,
+ "learning_rate": 0.0001271705273120413,
+ "loss": 1.4125,
+ "step": 1236
+ },
+ {
+ "epoch": 1.2373480041261604,
+ "grad_norm": 0.6572199463844299,
+ "learning_rate": 0.00012706963127037685,
+ "loss": 1.2403,
+ "step": 1237
+ },
+ {
+ "epoch": 1.2383482854552843,
+ "grad_norm": 0.634173572063446,
+ "learning_rate": 0.00012696870548408316,
+ "loss": 1.2861,
+ "step": 1238
+ },
+ {
+ "epoch": 1.2393485667844082,
+ "grad_norm": 0.6740716695785522,
+ "learning_rate": 0.00012686775006405946,
+ "loss": 1.6377,
+ "step": 1239
+ },
+ {
+ "epoch": 1.2403488481135319,
+ "grad_norm": 0.7334823608398438,
+ "learning_rate": 0.00012676676512123747,
+ "loss": 1.4186,
+ "step": 1240
+ },
+ {
+ "epoch": 1.2413491294426557,
+ "grad_norm": 0.9025078415870667,
+ "learning_rate": 0.00012666575076658134,
+ "loss": 1.4633,
+ "step": 1241
+ },
+ {
+ "epoch": 1.2423494107717796,
+ "grad_norm": 0.7449138760566711,
+ "learning_rate": 0.00012656470711108764,
+ "loss": 1.5294,
+ "step": 1242
+ },
+ {
+ "epoch": 1.2433496921009033,
+ "grad_norm": 0.6459099054336548,
+ "learning_rate": 0.00012646363426578505,
+ "loss": 1.4404,
+ "step": 1243
+ },
+ {
+ "epoch": 1.2443499734300272,
+ "grad_norm": 0.762955904006958,
+ "learning_rate": 0.0001263625323417343,
+ "loss": 1.6486,
+ "step": 1244
+ },
+ {
+ "epoch": 1.245350254759151,
+ "grad_norm": 0.6792619228363037,
+ "learning_rate": 0.0001262614014500282,
+ "loss": 1.3946,
+ "step": 1245
+ },
+ {
+ "epoch": 1.2463505360882747,
+ "grad_norm": 0.7938026189804077,
+ "learning_rate": 0.00012616024170179126,
+ "loss": 1.5163,
+ "step": 1246
+ },
+ {
+ "epoch": 1.2473508174173986,
+ "grad_norm": 0.6673023104667664,
+ "learning_rate": 0.00012605905320817976,
+ "loss": 1.5599,
+ "step": 1247
+ },
+ {
+ "epoch": 1.2483510987465225,
+ "grad_norm": 0.6543686389923096,
+ "learning_rate": 0.00012595783608038155,
+ "loss": 1.3298,
+ "step": 1248
+ },
+ {
+ "epoch": 1.2493513800756464,
+ "grad_norm": 0.6570146083831787,
+ "learning_rate": 0.00012585659042961596,
+ "loss": 1.2528,
+ "step": 1249
+ },
+ {
+ "epoch": 1.25035166140477,
+ "grad_norm": 0.6886934638023376,
+ "learning_rate": 0.00012575531636713368,
+ "loss": 1.4567,
+ "step": 1250
+ },
+ {
+ "epoch": 1.251351942733894,
+ "grad_norm": 0.660229504108429,
+ "learning_rate": 0.00012565401400421651,
+ "loss": 1.3333,
+ "step": 1251
+ },
+ {
+ "epoch": 1.2523522240630176,
+ "grad_norm": 0.7553595900535583,
+ "learning_rate": 0.0001255526834521775,
+ "loss": 1.4152,
+ "step": 1252
+ },
+ {
+ "epoch": 1.2533525053921415,
+ "grad_norm": 0.6283079385757446,
+ "learning_rate": 0.00012545132482236055,
+ "loss": 1.3201,
+ "step": 1253
+ },
+ {
+ "epoch": 1.2543527867212654,
+ "grad_norm": 0.6704882383346558,
+ "learning_rate": 0.0001253499382261405,
+ "loss": 1.3711,
+ "step": 1254
+ },
+ {
+ "epoch": 1.2553530680503893,
+ "grad_norm": 0.7282963991165161,
+ "learning_rate": 0.00012524852377492285,
+ "loss": 1.5492,
+ "step": 1255
+ },
+ {
+ "epoch": 1.256353349379513,
+ "grad_norm": 0.7712034583091736,
+ "learning_rate": 0.00012514708158014378,
+ "loss": 1.4299,
+ "step": 1256
+ },
+ {
+ "epoch": 1.2573536307086368,
+ "grad_norm": 0.6231324076652527,
+ "learning_rate": 0.00012504561175326985,
+ "loss": 1.3492,
+ "step": 1257
+ },
+ {
+ "epoch": 1.2583539120377607,
+ "grad_norm": 0.7122861742973328,
+ "learning_rate": 0.00012494411440579814,
+ "loss": 1.4583,
+ "step": 1258
+ },
+ {
+ "epoch": 1.2593541933668844,
+ "grad_norm": 0.7797596454620361,
+ "learning_rate": 0.0001248425896492558,
+ "loss": 1.4185,
+ "step": 1259
+ },
+ {
+ "epoch": 1.2603544746960083,
+ "grad_norm": 0.7058592438697815,
+ "learning_rate": 0.00012474103759520027,
+ "loss": 1.7918,
+ "step": 1260
+ },
+ {
+ "epoch": 1.2613547560251321,
+ "grad_norm": 0.6629828214645386,
+ "learning_rate": 0.00012463945835521878,
+ "loss": 1.2547,
+ "step": 1261
+ },
+ {
+ "epoch": 1.2623550373542558,
+ "grad_norm": 0.6975031495094299,
+ "learning_rate": 0.0001245378520409286,
+ "loss": 1.6256,
+ "step": 1262
+ },
+ {
+ "epoch": 1.2633553186833797,
+ "grad_norm": 0.7939541935920715,
+ "learning_rate": 0.0001244362187639767,
+ "loss": 1.2817,
+ "step": 1263
+ },
+ {
+ "epoch": 1.2643556000125036,
+ "grad_norm": 0.7042918801307678,
+ "learning_rate": 0.00012433455863603967,
+ "loss": 1.4207,
+ "step": 1264
+ },
+ {
+ "epoch": 1.2653558813416272,
+ "grad_norm": 0.6327396631240845,
+ "learning_rate": 0.00012423287176882358,
+ "loss": 1.3927,
+ "step": 1265
+ },
+ {
+ "epoch": 1.2663561626707511,
+ "grad_norm": 0.5797626376152039,
+ "learning_rate": 0.00012413115827406392,
+ "loss": 1.2808,
+ "step": 1266
+ },
+ {
+ "epoch": 1.267356443999875,
+ "grad_norm": 0.6891800165176392,
+ "learning_rate": 0.00012402941826352546,
+ "loss": 1.2385,
+ "step": 1267
+ },
+ {
+ "epoch": 1.268356725328999,
+ "grad_norm": 0.6648104190826416,
+ "learning_rate": 0.00012392765184900202,
+ "loss": 1.5402,
+ "step": 1268
+ },
+ {
+ "epoch": 1.2693570066581226,
+ "grad_norm": 0.7340229749679565,
+ "learning_rate": 0.0001238258591423165,
+ "loss": 1.2926,
+ "step": 1269
+ },
+ {
+ "epoch": 1.2703572879872465,
+ "grad_norm": 0.670605480670929,
+ "learning_rate": 0.00012372404025532072,
+ "loss": 1.3863,
+ "step": 1270
+ },
+ {
+ "epoch": 1.2713575693163701,
+ "grad_norm": 0.7016957402229309,
+ "learning_rate": 0.00012362219529989514,
+ "loss": 1.7362,
+ "step": 1271
+ },
+ {
+ "epoch": 1.272357850645494,
+ "grad_norm": 0.6318536400794983,
+ "learning_rate": 0.00012352032438794902,
+ "loss": 1.4278,
+ "step": 1272
+ },
+ {
+ "epoch": 1.273358131974618,
+ "grad_norm": 0.5807138681411743,
+ "learning_rate": 0.00012341842763142005,
+ "loss": 1.4762,
+ "step": 1273
+ },
+ {
+ "epoch": 1.2743584133037418,
+ "grad_norm": 0.6634588241577148,
+ "learning_rate": 0.00012331650514227425,
+ "loss": 1.6209,
+ "step": 1274
+ },
+ {
+ "epoch": 1.2753586946328654,
+ "grad_norm": 0.709530770778656,
+ "learning_rate": 0.00012321455703250616,
+ "loss": 1.5209,
+ "step": 1275
+ },
+ {
+ "epoch": 1.2763589759619893,
+ "grad_norm": 0.678584098815918,
+ "learning_rate": 0.00012311258341413822,
+ "loss": 1.4247,
+ "step": 1276
+ },
+ {
+ "epoch": 1.277359257291113,
+ "grad_norm": 0.6134077310562134,
+ "learning_rate": 0.00012301058439922102,
+ "loss": 1.3792,
+ "step": 1277
+ },
+ {
+ "epoch": 1.2783595386202369,
+ "grad_norm": 0.694976806640625,
+ "learning_rate": 0.000122908560099833,
+ "loss": 1.5957,
+ "step": 1278
+ },
+ {
+ "epoch": 1.2793598199493608,
+ "grad_norm": 0.835444986820221,
+ "learning_rate": 0.00012280651062808047,
+ "loss": 1.4917,
+ "step": 1279
+ },
+ {
+ "epoch": 1.2803601012784847,
+ "grad_norm": 0.6491605043411255,
+ "learning_rate": 0.00012270443609609729,
+ "loss": 1.4417,
+ "step": 1280
+ },
+ {
+ "epoch": 1.2813603826076083,
+ "grad_norm": 0.6651148200035095,
+ "learning_rate": 0.0001226023366160449,
+ "loss": 1.2857,
+ "step": 1281
+ },
+ {
+ "epoch": 1.2823606639367322,
+ "grad_norm": 0.6736195683479309,
+ "learning_rate": 0.00012250021230011225,
+ "loss": 1.3431,
+ "step": 1282
+ },
+ {
+ "epoch": 1.283360945265856,
+ "grad_norm": 0.7162345051765442,
+ "learning_rate": 0.00012239806326051539,
+ "loss": 1.4009,
+ "step": 1283
+ },
+ {
+ "epoch": 1.2843612265949798,
+ "grad_norm": 0.8337399363517761,
+ "learning_rate": 0.00012229588960949771,
+ "loss": 1.5303,
+ "step": 1284
+ },
+ {
+ "epoch": 1.2853615079241036,
+ "grad_norm": 0.6648454666137695,
+ "learning_rate": 0.00012219369145932959,
+ "loss": 1.7088,
+ "step": 1285
+ },
+ {
+ "epoch": 1.2863617892532275,
+ "grad_norm": 0.6891435384750366,
+ "learning_rate": 0.00012209146892230822,
+ "loss": 1.4053,
+ "step": 1286
+ },
+ {
+ "epoch": 1.2873620705823514,
+ "grad_norm": 0.6659008860588074,
+ "learning_rate": 0.00012198922211075778,
+ "loss": 1.3959,
+ "step": 1287
+ },
+ {
+ "epoch": 1.288362351911475,
+ "grad_norm": 0.6926385164260864,
+ "learning_rate": 0.00012188695113702896,
+ "loss": 1.536,
+ "step": 1288
+ },
+ {
+ "epoch": 1.289362633240599,
+ "grad_norm": 0.6584843397140503,
+ "learning_rate": 0.00012178465611349911,
+ "loss": 1.5099,
+ "step": 1289
+ },
+ {
+ "epoch": 1.2903629145697226,
+ "grad_norm": 0.7430850267410278,
+ "learning_rate": 0.00012168233715257194,
+ "loss": 1.3367,
+ "step": 1290
+ },
+ {
+ "epoch": 1.2913631958988465,
+ "grad_norm": 0.8379004597663879,
+ "learning_rate": 0.00012157999436667747,
+ "loss": 1.3542,
+ "step": 1291
+ },
+ {
+ "epoch": 1.2923634772279704,
+ "grad_norm": 0.7217230796813965,
+ "learning_rate": 0.00012147762786827193,
+ "loss": 1.4465,
+ "step": 1292
+ },
+ {
+ "epoch": 1.2933637585570943,
+ "grad_norm": 0.7268504500389099,
+ "learning_rate": 0.00012137523776983757,
+ "loss": 1.2616,
+ "step": 1293
+ },
+ {
+ "epoch": 1.294364039886218,
+ "grad_norm": 0.7402834296226501,
+ "learning_rate": 0.00012127282418388264,
+ "loss": 1.271,
+ "step": 1294
+ },
+ {
+ "epoch": 1.2953643212153418,
+ "grad_norm": 0.6314610242843628,
+ "learning_rate": 0.0001211703872229411,
+ "loss": 1.3718,
+ "step": 1295
+ },
+ {
+ "epoch": 1.2963646025444655,
+ "grad_norm": 0.6106632947921753,
+ "learning_rate": 0.00012106792699957263,
+ "loss": 1.6404,
+ "step": 1296
+ },
+ {
+ "epoch": 1.2973648838735894,
+ "grad_norm": 0.6568691730499268,
+ "learning_rate": 0.00012096544362636255,
+ "loss": 1.3559,
+ "step": 1297
+ },
+ {
+ "epoch": 1.2983651652027133,
+ "grad_norm": 0.700645387172699,
+ "learning_rate": 0.00012086293721592152,
+ "loss": 1.5258,
+ "step": 1298
+ },
+ {
+ "epoch": 1.2993654465318372,
+ "grad_norm": 1.0722559690475464,
+ "learning_rate": 0.00012076040788088554,
+ "loss": 1.4921,
+ "step": 1299
+ },
+ {
+ "epoch": 1.3003657278609608,
+ "grad_norm": 0.6164102554321289,
+ "learning_rate": 0.00012065785573391581,
+ "loss": 1.3766,
+ "step": 1300
+ },
+ {
+ "epoch": 1.3013660091900847,
+ "grad_norm": 0.7066829204559326,
+ "learning_rate": 0.00012055528088769861,
+ "loss": 1.4322,
+ "step": 1301
+ },
+ {
+ "epoch": 1.3023662905192086,
+ "grad_norm": 0.6311153769493103,
+ "learning_rate": 0.00012045268345494511,
+ "loss": 1.2958,
+ "step": 1302
+ },
+ {
+ "epoch": 1.3033665718483323,
+ "grad_norm": 0.6254247426986694,
+ "learning_rate": 0.00012035006354839133,
+ "loss": 1.3125,
+ "step": 1303
+ },
+ {
+ "epoch": 1.3043668531774562,
+ "grad_norm": 0.7812719941139221,
+ "learning_rate": 0.00012024742128079805,
+ "loss": 1.5446,
+ "step": 1304
+ },
+ {
+ "epoch": 1.30536713450658,
+ "grad_norm": 0.6067742109298706,
+ "learning_rate": 0.00012014475676495052,
+ "loss": 1.2821,
+ "step": 1305
+ },
+ {
+ "epoch": 1.306367415835704,
+ "grad_norm": 0.6812780499458313,
+ "learning_rate": 0.00012004207011365849,
+ "loss": 1.2988,
+ "step": 1306
+ },
+ {
+ "epoch": 1.3073676971648276,
+ "grad_norm": 0.6978224515914917,
+ "learning_rate": 0.00011993936143975599,
+ "loss": 1.3677,
+ "step": 1307
+ },
+ {
+ "epoch": 1.3083679784939515,
+ "grad_norm": 0.660102903842926,
+ "learning_rate": 0.00011983663085610131,
+ "loss": 1.3688,
+ "step": 1308
+ },
+ {
+ "epoch": 1.3093682598230751,
+ "grad_norm": 0.706007182598114,
+ "learning_rate": 0.00011973387847557676,
+ "loss": 1.2638,
+ "step": 1309
+ },
+ {
+ "epoch": 1.310368541152199,
+ "grad_norm": 0.6711030602455139,
+ "learning_rate": 0.00011963110441108863,
+ "loss": 1.2569,
+ "step": 1310
+ },
+ {
+ "epoch": 1.311368822481323,
+ "grad_norm": 0.6825215816497803,
+ "learning_rate": 0.000119528308775567,
+ "loss": 1.3912,
+ "step": 1311
+ },
+ {
+ "epoch": 1.3123691038104468,
+ "grad_norm": 0.725979745388031,
+ "learning_rate": 0.00011942549168196575,
+ "loss": 1.331,
+ "step": 1312
+ },
+ {
+ "epoch": 1.3133693851395705,
+ "grad_norm": 0.6699597835540771,
+ "learning_rate": 0.00011932265324326221,
+ "loss": 1.4171,
+ "step": 1313
+ },
+ {
+ "epoch": 1.3143696664686944,
+ "grad_norm": 0.6116276383399963,
+ "learning_rate": 0.0001192197935724573,
+ "loss": 1.3333,
+ "step": 1314
+ },
+ {
+ "epoch": 1.315369947797818,
+ "grad_norm": 0.6644623875617981,
+ "learning_rate": 0.00011911691278257511,
+ "loss": 1.5515,
+ "step": 1315
+ },
+ {
+ "epoch": 1.316370229126942,
+ "grad_norm": 0.6456226110458374,
+ "learning_rate": 0.0001190140109866631,
+ "loss": 1.2691,
+ "step": 1316
+ },
+ {
+ "epoch": 1.3173705104560658,
+ "grad_norm": 0.6665071249008179,
+ "learning_rate": 0.00011891108829779165,
+ "loss": 1.3782,
+ "step": 1317
+ },
+ {
+ "epoch": 1.3183707917851897,
+ "grad_norm": 0.7108166813850403,
+ "learning_rate": 0.00011880814482905422,
+ "loss": 1.3122,
+ "step": 1318
+ },
+ {
+ "epoch": 1.3193710731143133,
+ "grad_norm": 0.7184962630271912,
+ "learning_rate": 0.00011870518069356709,
+ "loss": 1.3624,
+ "step": 1319
+ },
+ {
+ "epoch": 1.3203713544434372,
+ "grad_norm": 0.6516618132591248,
+ "learning_rate": 0.0001186021960044692,
+ "loss": 1.5654,
+ "step": 1320
+ },
+ {
+ "epoch": 1.3213716357725611,
+ "grad_norm": 0.6597375869750977,
+ "learning_rate": 0.00011849919087492211,
+ "loss": 1.4765,
+ "step": 1321
+ },
+ {
+ "epoch": 1.3223719171016848,
+ "grad_norm": 0.8731528520584106,
+ "learning_rate": 0.00011839616541810983,
+ "loss": 1.3876,
+ "step": 1322
+ },
+ {
+ "epoch": 1.3233721984308087,
+ "grad_norm": 0.6694337725639343,
+ "learning_rate": 0.00011829311974723867,
+ "loss": 1.327,
+ "step": 1323
+ },
+ {
+ "epoch": 1.3243724797599326,
+ "grad_norm": 0.7454182505607605,
+ "learning_rate": 0.00011819005397553723,
+ "loss": 1.4202,
+ "step": 1324
+ },
+ {
+ "epoch": 1.3253727610890562,
+ "grad_norm": 0.6783546209335327,
+ "learning_rate": 0.00011808696821625613,
+ "loss": 1.3966,
+ "step": 1325
+ },
+ {
+ "epoch": 1.32637304241818,
+ "grad_norm": 0.7126333713531494,
+ "learning_rate": 0.000117983862582668,
+ "loss": 1.3896,
+ "step": 1326
+ },
+ {
+ "epoch": 1.327373323747304,
+ "grad_norm": 0.6765009164810181,
+ "learning_rate": 0.00011788073718806725,
+ "loss": 1.3678,
+ "step": 1327
+ },
+ {
+ "epoch": 1.3283736050764277,
+ "grad_norm": 0.6767436861991882,
+ "learning_rate": 0.00011777759214577006,
+ "loss": 1.497,
+ "step": 1328
+ },
+ {
+ "epoch": 1.3293738864055515,
+ "grad_norm": 0.7058733105659485,
+ "learning_rate": 0.00011767442756911417,
+ "loss": 1.4992,
+ "step": 1329
+ },
+ {
+ "epoch": 1.3303741677346754,
+ "grad_norm": 0.6815193295478821,
+ "learning_rate": 0.00011757124357145881,
+ "loss": 1.4952,
+ "step": 1330
+ },
+ {
+ "epoch": 1.3313744490637993,
+ "grad_norm": 0.6513908505439758,
+ "learning_rate": 0.00011746804026618452,
+ "loss": 1.5202,
+ "step": 1331
+ },
+ {
+ "epoch": 1.332374730392923,
+ "grad_norm": 0.6763479113578796,
+ "learning_rate": 0.00011736481776669306,
+ "loss": 1.4547,
+ "step": 1332
+ },
+ {
+ "epoch": 1.3333750117220469,
+ "grad_norm": 0.6361576914787292,
+ "learning_rate": 0.00011726157618640728,
+ "loss": 1.3231,
+ "step": 1333
+ },
+ {
+ "epoch": 1.3343752930511705,
+ "grad_norm": 0.7247117161750793,
+ "learning_rate": 0.00011715831563877104,
+ "loss": 1.5126,
+ "step": 1334
+ },
+ {
+ "epoch": 1.3353755743802944,
+ "grad_norm": 0.8347336649894714,
+ "learning_rate": 0.00011705503623724898,
+ "loss": 1.5669,
+ "step": 1335
+ },
+ {
+ "epoch": 1.3363758557094183,
+ "grad_norm": 0.6969489455223083,
+ "learning_rate": 0.00011695173809532652,
+ "loss": 1.3646,
+ "step": 1336
+ },
+ {
+ "epoch": 1.3373761370385422,
+ "grad_norm": 0.6771052479743958,
+ "learning_rate": 0.00011684842132650957,
+ "loss": 1.6715,
+ "step": 1337
+ },
+ {
+ "epoch": 1.3383764183676659,
+ "grad_norm": 0.6224768161773682,
+ "learning_rate": 0.00011674508604432464,
+ "loss": 1.5712,
+ "step": 1338
+ },
+ {
+ "epoch": 1.3393766996967897,
+ "grad_norm": 0.6684542298316956,
+ "learning_rate": 0.00011664173236231848,
+ "loss": 1.5669,
+ "step": 1339
+ },
+ {
+ "epoch": 1.3403769810259134,
+ "grad_norm": 0.6805415153503418,
+ "learning_rate": 0.0001165383603940581,
+ "loss": 1.3648,
+ "step": 1340
+ },
+ {
+ "epoch": 1.3413772623550373,
+ "grad_norm": 0.7991671562194824,
+ "learning_rate": 0.00011643497025313061,
+ "loss": 1.3703,
+ "step": 1341
+ },
+ {
+ "epoch": 1.3423775436841612,
+ "grad_norm": 0.7023015022277832,
+ "learning_rate": 0.00011633156205314309,
+ "loss": 1.366,
+ "step": 1342
+ },
+ {
+ "epoch": 1.343377825013285,
+ "grad_norm": 0.7017894387245178,
+ "learning_rate": 0.00011622813590772244,
+ "loss": 1.4816,
+ "step": 1343
+ },
+ {
+ "epoch": 1.3443781063424087,
+ "grad_norm": 0.704626739025116,
+ "learning_rate": 0.00011612469193051525,
+ "loss": 1.2841,
+ "step": 1344
+ },
+ {
+ "epoch": 1.3453783876715326,
+ "grad_norm": 0.707372784614563,
+ "learning_rate": 0.00011602123023518779,
+ "loss": 1.3226,
+ "step": 1345
+ },
+ {
+ "epoch": 1.3463786690006565,
+ "grad_norm": 0.6367921233177185,
+ "learning_rate": 0.00011591775093542572,
+ "loss": 1.3292,
+ "step": 1346
+ },
+ {
+ "epoch": 1.3473789503297802,
+ "grad_norm": 0.7131246328353882,
+ "learning_rate": 0.0001158142541449341,
+ "loss": 1.3537,
+ "step": 1347
+ },
+ {
+ "epoch": 1.348379231658904,
+ "grad_norm": 0.6140089631080627,
+ "learning_rate": 0.00011571073997743716,
+ "loss": 1.4316,
+ "step": 1348
+ },
+ {
+ "epoch": 1.349379512988028,
+ "grad_norm": 0.6347403526306152,
+ "learning_rate": 0.0001156072085466783,
+ "loss": 1.4214,
+ "step": 1349
+ },
+ {
+ "epoch": 1.3503797943171518,
+ "grad_norm": 0.5540759563446045,
+ "learning_rate": 0.00011550365996641979,
+ "loss": 1.25,
+ "step": 1350
+ },
+ {
+ "epoch": 1.3513800756462755,
+ "grad_norm": 0.6721670031547546,
+ "learning_rate": 0.00011540009435044281,
+ "loss": 1.381,
+ "step": 1351
+ },
+ {
+ "epoch": 1.3523803569753994,
+ "grad_norm": 0.5901767015457153,
+ "learning_rate": 0.00011529651181254723,
+ "loss": 1.5127,
+ "step": 1352
+ },
+ {
+ "epoch": 1.353380638304523,
+ "grad_norm": 0.6438884139060974,
+ "learning_rate": 0.0001151929124665516,
+ "loss": 1.3348,
+ "step": 1353
+ },
+ {
+ "epoch": 1.354380919633647,
+ "grad_norm": 0.9631819725036621,
+ "learning_rate": 0.00011508929642629274,
+ "loss": 1.5822,
+ "step": 1354
+ },
+ {
+ "epoch": 1.3553812009627708,
+ "grad_norm": 0.6426034569740295,
+ "learning_rate": 0.00011498566380562601,
+ "loss": 1.3487,
+ "step": 1355
+ },
+ {
+ "epoch": 1.3563814822918947,
+ "grad_norm": 0.682264506816864,
+ "learning_rate": 0.0001148820147184249,
+ "loss": 1.3331,
+ "step": 1356
+ },
+ {
+ "epoch": 1.3573817636210184,
+ "grad_norm": 0.746907114982605,
+ "learning_rate": 0.00011477834927858104,
+ "loss": 1.38,
+ "step": 1357
+ },
+ {
+ "epoch": 1.3583820449501423,
+ "grad_norm": 0.7521925568580627,
+ "learning_rate": 0.00011467466760000399,
+ "loss": 1.3798,
+ "step": 1358
+ },
+ {
+ "epoch": 1.359382326279266,
+ "grad_norm": 0.7887171506881714,
+ "learning_rate": 0.00011457096979662114,
+ "loss": 1.3306,
+ "step": 1359
+ },
+ {
+ "epoch": 1.3603826076083898,
+ "grad_norm": 0.7623118162155151,
+ "learning_rate": 0.00011446725598237767,
+ "loss": 1.4307,
+ "step": 1360
+ },
+ {
+ "epoch": 1.3613828889375137,
+ "grad_norm": 0.6182076930999756,
+ "learning_rate": 0.00011436352627123623,
+ "loss": 1.3776,
+ "step": 1361
+ },
+ {
+ "epoch": 1.3623831702666376,
+ "grad_norm": 0.6136983633041382,
+ "learning_rate": 0.00011425978077717709,
+ "loss": 1.4815,
+ "step": 1362
+ },
+ {
+ "epoch": 1.3633834515957612,
+ "grad_norm": 0.6165998578071594,
+ "learning_rate": 0.00011415601961419775,
+ "loss": 1.4252,
+ "step": 1363
+ },
+ {
+ "epoch": 1.3643837329248851,
+ "grad_norm": 0.7338213324546814,
+ "learning_rate": 0.00011405224289631295,
+ "loss": 1.4426,
+ "step": 1364
+ },
+ {
+ "epoch": 1.365384014254009,
+ "grad_norm": 0.8061873316764832,
+ "learning_rate": 0.00011394845073755455,
+ "loss": 1.5366,
+ "step": 1365
+ },
+ {
+ "epoch": 1.3663842955831327,
+ "grad_norm": 0.8496206402778625,
+ "learning_rate": 0.0001138446432519714,
+ "loss": 1.3305,
+ "step": 1366
+ },
+ {
+ "epoch": 1.3673845769122566,
+ "grad_norm": 0.7370564341545105,
+ "learning_rate": 0.00011374082055362909,
+ "loss": 1.5069,
+ "step": 1367
+ },
+ {
+ "epoch": 1.3683848582413805,
+ "grad_norm": 0.630095362663269,
+ "learning_rate": 0.00011363698275661001,
+ "loss": 1.2846,
+ "step": 1368
+ },
+ {
+ "epoch": 1.3693851395705043,
+ "grad_norm": 0.7039877772331238,
+ "learning_rate": 0.00011353312997501313,
+ "loss": 1.4091,
+ "step": 1369
+ },
+ {
+ "epoch": 1.370385420899628,
+ "grad_norm": 0.7010135650634766,
+ "learning_rate": 0.00011342926232295386,
+ "loss": 1.4068,
+ "step": 1370
+ },
+ {
+ "epoch": 1.371385702228752,
+ "grad_norm": 0.6542472243309021,
+ "learning_rate": 0.00011332537991456398,
+ "loss": 1.3422,
+ "step": 1371
+ },
+ {
+ "epoch": 1.3723859835578756,
+ "grad_norm": 0.7767142057418823,
+ "learning_rate": 0.00011322148286399147,
+ "loss": 1.3156,
+ "step": 1372
+ },
+ {
+ "epoch": 1.3733862648869994,
+ "grad_norm": 0.7862635254859924,
+ "learning_rate": 0.0001131175712854004,
+ "loss": 1.3952,
+ "step": 1373
+ },
+ {
+ "epoch": 1.3743865462161233,
+ "grad_norm": 0.7444994449615479,
+ "learning_rate": 0.00011301364529297079,
+ "loss": 1.2304,
+ "step": 1374
+ },
+ {
+ "epoch": 1.3753868275452472,
+ "grad_norm": 0.6078478693962097,
+ "learning_rate": 0.0001129097050008985,
+ "loss": 1.4364,
+ "step": 1375
+ },
+ {
+ "epoch": 1.3763871088743709,
+ "grad_norm": 0.7011739015579224,
+ "learning_rate": 0.00011280575052339514,
+ "loss": 1.4455,
+ "step": 1376
+ },
+ {
+ "epoch": 1.3773873902034948,
+ "grad_norm": 0.7185930013656616,
+ "learning_rate": 0.00011270178197468789,
+ "loss": 1.349,
+ "step": 1377
+ },
+ {
+ "epoch": 1.3783876715326184,
+ "grad_norm": 0.6734675168991089,
+ "learning_rate": 0.00011259779946901934,
+ "loss": 1.4803,
+ "step": 1378
+ },
+ {
+ "epoch": 1.3793879528617423,
+ "grad_norm": 0.674439013004303,
+ "learning_rate": 0.0001124938031206475,
+ "loss": 1.5707,
+ "step": 1379
+ },
+ {
+ "epoch": 1.3803882341908662,
+ "grad_norm": 0.6843717694282532,
+ "learning_rate": 0.00011238979304384554,
+ "loss": 1.5508,
+ "step": 1380
+ },
+ {
+ "epoch": 1.38138851551999,
+ "grad_norm": 0.6178708672523499,
+ "learning_rate": 0.0001122857693529017,
+ "loss": 1.2827,
+ "step": 1381
+ },
+ {
+ "epoch": 1.3823887968491138,
+ "grad_norm": 0.721108078956604,
+ "learning_rate": 0.0001121817321621192,
+ "loss": 1.3768,
+ "step": 1382
+ },
+ {
+ "epoch": 1.3833890781782376,
+ "grad_norm": 0.6790196299552917,
+ "learning_rate": 0.00011207768158581613,
+ "loss": 1.508,
+ "step": 1383
+ },
+ {
+ "epoch": 1.3843893595073615,
+ "grad_norm": 0.6942607760429382,
+ "learning_rate": 0.00011197361773832525,
+ "loss": 1.2476,
+ "step": 1384
+ },
+ {
+ "epoch": 1.3853896408364852,
+ "grad_norm": 0.6791447997093201,
+ "learning_rate": 0.00011186954073399387,
+ "loss": 1.5537,
+ "step": 1385
+ },
+ {
+ "epoch": 1.386389922165609,
+ "grad_norm": 0.6842163801193237,
+ "learning_rate": 0.00011176545068718385,
+ "loss": 1.4336,
+ "step": 1386
+ },
+ {
+ "epoch": 1.387390203494733,
+ "grad_norm": 0.6922981142997742,
+ "learning_rate": 0.0001116613477122713,
+ "loss": 1.2496,
+ "step": 1387
+ },
+ {
+ "epoch": 1.3883904848238566,
+ "grad_norm": 0.7480785250663757,
+ "learning_rate": 0.00011155723192364658,
+ "loss": 1.5798,
+ "step": 1388
+ },
+ {
+ "epoch": 1.3893907661529805,
+ "grad_norm": 0.6371482610702515,
+ "learning_rate": 0.00011145310343571411,
+ "loss": 1.326,
+ "step": 1389
+ },
+ {
+ "epoch": 1.3903910474821044,
+ "grad_norm": 0.663810670375824,
+ "learning_rate": 0.00011134896236289224,
+ "loss": 1.3021,
+ "step": 1390
+ },
+ {
+ "epoch": 1.391391328811228,
+ "grad_norm": 0.6067004799842834,
+ "learning_rate": 0.0001112448088196132,
+ "loss": 1.5062,
+ "step": 1391
+ },
+ {
+ "epoch": 1.392391610140352,
+ "grad_norm": 0.6500088572502136,
+ "learning_rate": 0.00011114064292032282,
+ "loss": 1.3196,
+ "step": 1392
+ },
+ {
+ "epoch": 1.3933918914694758,
+ "grad_norm": 0.7117498517036438,
+ "learning_rate": 0.0001110364647794807,
+ "loss": 1.354,
+ "step": 1393
+ },
+ {
+ "epoch": 1.3943921727985997,
+ "grad_norm": 0.5792518854141235,
+ "learning_rate": 0.00011093227451155974,
+ "loss": 1.1684,
+ "step": 1394
+ },
+ {
+ "epoch": 1.3953924541277234,
+ "grad_norm": 0.6920313835144043,
+ "learning_rate": 0.0001108280722310462,
+ "loss": 1.5114,
+ "step": 1395
+ },
+ {
+ "epoch": 1.3963927354568473,
+ "grad_norm": 0.5932325720787048,
+ "learning_rate": 0.0001107238580524395,
+ "loss": 1.4519,
+ "step": 1396
+ },
+ {
+ "epoch": 1.397393016785971,
+ "grad_norm": 0.7241511940956116,
+ "learning_rate": 0.00011061963209025223,
+ "loss": 1.4131,
+ "step": 1397
+ },
+ {
+ "epoch": 1.3983932981150948,
+ "grad_norm": 0.6144927740097046,
+ "learning_rate": 0.00011051539445900983,
+ "loss": 1.4436,
+ "step": 1398
+ },
+ {
+ "epoch": 1.3993935794442187,
+ "grad_norm": 0.589124321937561,
+ "learning_rate": 0.00011041114527325065,
+ "loss": 1.4069,
+ "step": 1399
+ },
+ {
+ "epoch": 1.4003938607733426,
+ "grad_norm": 0.6695122122764587,
+ "learning_rate": 0.00011030688464752566,
+ "loss": 1.6472,
+ "step": 1400
+ },
+ {
+ "epoch": 1.4013941421024663,
+ "grad_norm": 0.6082026362419128,
+ "learning_rate": 0.00011020261269639842,
+ "loss": 1.3188,
+ "step": 1401
+ },
+ {
+ "epoch": 1.4023944234315902,
+ "grad_norm": 0.8926504254341125,
+ "learning_rate": 0.000110098329534445,
+ "loss": 1.4195,
+ "step": 1402
+ },
+ {
+ "epoch": 1.4033947047607138,
+ "grad_norm": 0.6825796365737915,
+ "learning_rate": 0.00010999403527625367,
+ "loss": 1.5214,
+ "step": 1403
+ },
+ {
+ "epoch": 1.4043949860898377,
+ "grad_norm": 0.6856653690338135,
+ "learning_rate": 0.00010988973003642499,
+ "loss": 1.4579,
+ "step": 1404
+ },
+ {
+ "epoch": 1.4053952674189616,
+ "grad_norm": 0.6756052374839783,
+ "learning_rate": 0.00010978541392957156,
+ "loss": 1.331,
+ "step": 1405
+ },
+ {
+ "epoch": 1.4063955487480855,
+ "grad_norm": 0.6562577486038208,
+ "learning_rate": 0.00010968108707031792,
+ "loss": 1.2906,
+ "step": 1406
+ },
+ {
+ "epoch": 1.4073958300772091,
+ "grad_norm": 0.7208355069160461,
+ "learning_rate": 0.00010957674957330042,
+ "loss": 1.5163,
+ "step": 1407
+ },
+ {
+ "epoch": 1.408396111406333,
+ "grad_norm": 0.6576356291770935,
+ "learning_rate": 0.00010947240155316707,
+ "loss": 1.3934,
+ "step": 1408
+ },
+ {
+ "epoch": 1.409396392735457,
+ "grad_norm": 0.6244364976882935,
+ "learning_rate": 0.00010936804312457749,
+ "loss": 1.3903,
+ "step": 1409
+ },
+ {
+ "epoch": 1.4103966740645806,
+ "grad_norm": 0.5791237354278564,
+ "learning_rate": 0.00010926367440220276,
+ "loss": 1.2708,
+ "step": 1410
+ },
+ {
+ "epoch": 1.4113969553937045,
+ "grad_norm": 0.7043458819389343,
+ "learning_rate": 0.00010915929550072517,
+ "loss": 1.2446,
+ "step": 1411
+ },
+ {
+ "epoch": 1.4123972367228284,
+ "grad_norm": 0.5865835547447205,
+ "learning_rate": 0.00010905490653483827,
+ "loss": 1.657,
+ "step": 1412
+ },
+ {
+ "epoch": 1.4133975180519522,
+ "grad_norm": 0.6033587455749512,
+ "learning_rate": 0.00010895050761924668,
+ "loss": 1.4481,
+ "step": 1413
+ },
+ {
+ "epoch": 1.414397799381076,
+ "grad_norm": 0.7100054025650024,
+ "learning_rate": 0.00010884609886866588,
+ "loss": 1.5413,
+ "step": 1414
+ },
+ {
+ "epoch": 1.4153980807101998,
+ "grad_norm": 0.8067667484283447,
+ "learning_rate": 0.00010874168039782227,
+ "loss": 1.4327,
+ "step": 1415
+ },
+ {
+ "epoch": 1.4163983620393235,
+ "grad_norm": 0.7058808207511902,
+ "learning_rate": 0.00010863725232145286,
+ "loss": 1.534,
+ "step": 1416
+ },
+ {
+ "epoch": 1.4173986433684473,
+ "grad_norm": 0.5253664255142212,
+ "learning_rate": 0.00010853281475430517,
+ "loss": 1.0816,
+ "step": 1417
+ },
+ {
+ "epoch": 1.4183989246975712,
+ "grad_norm": 0.6874144673347473,
+ "learning_rate": 0.0001084283678111372,
+ "loss": 1.4386,
+ "step": 1418
+ },
+ {
+ "epoch": 1.4193992060266951,
+ "grad_norm": 0.74046790599823,
+ "learning_rate": 0.00010832391160671729,
+ "loss": 1.3393,
+ "step": 1419
+ },
+ {
+ "epoch": 1.4203994873558188,
+ "grad_norm": 0.6461816430091858,
+ "learning_rate": 0.00010821944625582392,
+ "loss": 1.5251,
+ "step": 1420
+ },
+ {
+ "epoch": 1.4213997686849427,
+ "grad_norm": 0.6058275699615479,
+ "learning_rate": 0.00010811497187324555,
+ "loss": 1.1555,
+ "step": 1421
+ },
+ {
+ "epoch": 1.4224000500140663,
+ "grad_norm": 0.6121847033500671,
+ "learning_rate": 0.00010801048857378071,
+ "loss": 1.248,
+ "step": 1422
+ },
+ {
+ "epoch": 1.4234003313431902,
+ "grad_norm": 0.5949802398681641,
+ "learning_rate": 0.00010790599647223763,
+ "loss": 1.5056,
+ "step": 1423
+ },
+ {
+ "epoch": 1.424400612672314,
+ "grad_norm": 0.6441097259521484,
+ "learning_rate": 0.0001078014956834342,
+ "loss": 1.5484,
+ "step": 1424
+ },
+ {
+ "epoch": 1.425400894001438,
+ "grad_norm": 0.686570942401886,
+ "learning_rate": 0.00010769698632219794,
+ "loss": 1.631,
+ "step": 1425
+ },
+ {
+ "epoch": 1.4264011753305617,
+ "grad_norm": 0.675699770450592,
+ "learning_rate": 0.00010759246850336572,
+ "loss": 1.4044,
+ "step": 1426
+ },
+ {
+ "epoch": 1.4274014566596855,
+ "grad_norm": 0.7777390480041504,
+ "learning_rate": 0.0001074879423417837,
+ "loss": 1.6326,
+ "step": 1427
+ },
+ {
+ "epoch": 1.4284017379888094,
+ "grad_norm": 0.6218550205230713,
+ "learning_rate": 0.00010738340795230721,
+ "loss": 1.5341,
+ "step": 1428
+ },
+ {
+ "epoch": 1.429402019317933,
+ "grad_norm": 0.7164304256439209,
+ "learning_rate": 0.00010727886544980068,
+ "loss": 1.5888,
+ "step": 1429
+ },
+ {
+ "epoch": 1.430402300647057,
+ "grad_norm": 0.6924182772636414,
+ "learning_rate": 0.00010717431494913741,
+ "loss": 1.2542,
+ "step": 1430
+ },
+ {
+ "epoch": 1.4314025819761809,
+ "grad_norm": 0.6145774126052856,
+ "learning_rate": 0.00010706975656519946,
+ "loss": 1.4038,
+ "step": 1431
+ },
+ {
+ "epoch": 1.4324028633053048,
+ "grad_norm": 0.5560014843940735,
+ "learning_rate": 0.00010696519041287765,
+ "loss": 1.2659,
+ "step": 1432
+ },
+ {
+ "epoch": 1.4334031446344284,
+ "grad_norm": 0.6854751110076904,
+ "learning_rate": 0.0001068606166070712,
+ "loss": 1.4407,
+ "step": 1433
+ },
+ {
+ "epoch": 1.4344034259635523,
+ "grad_norm": 0.6936755180358887,
+ "learning_rate": 0.00010675603526268785,
+ "loss": 1.4219,
+ "step": 1434
+ },
+ {
+ "epoch": 1.435403707292676,
+ "grad_norm": 0.8174700736999512,
+ "learning_rate": 0.00010665144649464356,
+ "loss": 1.397,
+ "step": 1435
+ },
+ {
+ "epoch": 1.4364039886217999,
+ "grad_norm": 0.7231045365333557,
+ "learning_rate": 0.00010654685041786249,
+ "loss": 1.4558,
+ "step": 1436
+ },
+ {
+ "epoch": 1.4374042699509237,
+ "grad_norm": 0.6431431174278259,
+ "learning_rate": 0.00010644224714727681,
+ "loss": 1.3522,
+ "step": 1437
+ },
+ {
+ "epoch": 1.4384045512800476,
+ "grad_norm": 0.7025414109230042,
+ "learning_rate": 0.0001063376367978266,
+ "loss": 1.2723,
+ "step": 1438
+ },
+ {
+ "epoch": 1.4394048326091713,
+ "grad_norm": 0.6382789611816406,
+ "learning_rate": 0.00010623301948445971,
+ "loss": 1.4065,
+ "step": 1439
+ },
+ {
+ "epoch": 1.4404051139382952,
+ "grad_norm": 0.7055328488349915,
+ "learning_rate": 0.00010612839532213164,
+ "loss": 1.6186,
+ "step": 1440
+ },
+ {
+ "epoch": 1.4414053952674188,
+ "grad_norm": 0.6769623160362244,
+ "learning_rate": 0.00010602376442580544,
+ "loss": 1.49,
+ "step": 1441
+ },
+ {
+ "epoch": 1.4424056765965427,
+ "grad_norm": 0.6875097751617432,
+ "learning_rate": 0.00010591912691045152,
+ "loss": 1.3063,
+ "step": 1442
+ },
+ {
+ "epoch": 1.4434059579256666,
+ "grad_norm": 0.7747283577919006,
+ "learning_rate": 0.00010581448289104758,
+ "loss": 1.67,
+ "step": 1443
+ },
+ {
+ "epoch": 1.4444062392547905,
+ "grad_norm": 0.7236614227294922,
+ "learning_rate": 0.00010570983248257853,
+ "loss": 1.4703,
+ "step": 1444
+ },
+ {
+ "epoch": 1.4454065205839142,
+ "grad_norm": 0.7141956686973572,
+ "learning_rate": 0.00010560517580003617,
+ "loss": 1.5828,
+ "step": 1445
+ },
+ {
+ "epoch": 1.446406801913038,
+ "grad_norm": 0.679790198802948,
+ "learning_rate": 0.00010550051295841931,
+ "loss": 1.4706,
+ "step": 1446
+ },
+ {
+ "epoch": 1.447407083242162,
+ "grad_norm": 0.7930448651313782,
+ "learning_rate": 0.00010539584407273349,
+ "loss": 1.5388,
+ "step": 1447
+ },
+ {
+ "epoch": 1.4484073645712856,
+ "grad_norm": 0.7099994421005249,
+ "learning_rate": 0.00010529116925799085,
+ "loss": 1.408,
+ "step": 1448
+ },
+ {
+ "epoch": 1.4494076459004095,
+ "grad_norm": 0.6459046602249146,
+ "learning_rate": 0.00010518648862921012,
+ "loss": 1.3263,
+ "step": 1449
+ },
+ {
+ "epoch": 1.4504079272295334,
+ "grad_norm": 0.761446475982666,
+ "learning_rate": 0.00010508180230141635,
+ "loss": 1.3166,
+ "step": 1450
+ },
+ {
+ "epoch": 1.451408208558657,
+ "grad_norm": 0.6198295950889587,
+ "learning_rate": 0.00010497711038964086,
+ "loss": 1.3191,
+ "step": 1451
+ },
+ {
+ "epoch": 1.452408489887781,
+ "grad_norm": 0.6751184463500977,
+ "learning_rate": 0.0001048724130089212,
+ "loss": 1.1869,
+ "step": 1452
+ },
+ {
+ "epoch": 1.4534087712169048,
+ "grad_norm": 0.6853645443916321,
+ "learning_rate": 0.00010476771027430086,
+ "loss": 1.2579,
+ "step": 1453
+ },
+ {
+ "epoch": 1.4544090525460285,
+ "grad_norm": 0.6402629017829895,
+ "learning_rate": 0.00010466300230082911,
+ "loss": 1.3192,
+ "step": 1454
+ },
+ {
+ "epoch": 1.4554093338751524,
+ "grad_norm": 0.7862108945846558,
+ "learning_rate": 0.00010455828920356115,
+ "loss": 1.3619,
+ "step": 1455
+ },
+ {
+ "epoch": 1.4564096152042763,
+ "grad_norm": 0.7008057832717896,
+ "learning_rate": 0.00010445357109755771,
+ "loss": 1.3524,
+ "step": 1456
+ },
+ {
+ "epoch": 1.4574098965334001,
+ "grad_norm": 0.6477895379066467,
+ "learning_rate": 0.00010434884809788508,
+ "loss": 1.6172,
+ "step": 1457
+ },
+ {
+ "epoch": 1.4584101778625238,
+ "grad_norm": 0.6312345862388611,
+ "learning_rate": 0.00010424412031961484,
+ "loss": 1.2121,
+ "step": 1458
+ },
+ {
+ "epoch": 1.4594104591916477,
+ "grad_norm": 0.6922104358673096,
+ "learning_rate": 0.00010413938787782394,
+ "loss": 1.3243,
+ "step": 1459
+ },
+ {
+ "epoch": 1.4604107405207714,
+ "grad_norm": 0.670599102973938,
+ "learning_rate": 0.00010403465088759437,
+ "loss": 1.346,
+ "step": 1460
+ },
+ {
+ "epoch": 1.4614110218498952,
+ "grad_norm": 0.7351789474487305,
+ "learning_rate": 0.00010392990946401313,
+ "loss": 1.5343,
+ "step": 1461
+ },
+ {
+ "epoch": 1.4624113031790191,
+ "grad_norm": 0.6756190061569214,
+ "learning_rate": 0.00010382516372217215,
+ "loss": 1.4118,
+ "step": 1462
+ },
+ {
+ "epoch": 1.463411584508143,
+ "grad_norm": 0.6219121217727661,
+ "learning_rate": 0.000103720413777168,
+ "loss": 1.3602,
+ "step": 1463
+ },
+ {
+ "epoch": 1.4644118658372667,
+ "grad_norm": 0.6602663993835449,
+ "learning_rate": 0.00010361565974410192,
+ "loss": 1.4921,
+ "step": 1464
+ },
+ {
+ "epoch": 1.4654121471663906,
+ "grad_norm": 0.6103453636169434,
+ "learning_rate": 0.00010351090173807969,
+ "loss": 1.3259,
+ "step": 1465
+ },
+ {
+ "epoch": 1.4664124284955142,
+ "grad_norm": 0.745473325252533,
+ "learning_rate": 0.00010340613987421137,
+ "loss": 1.6036,
+ "step": 1466
+ },
+ {
+ "epoch": 1.4674127098246381,
+ "grad_norm": 0.6537976861000061,
+ "learning_rate": 0.00010330137426761135,
+ "loss": 1.3511,
+ "step": 1467
+ },
+ {
+ "epoch": 1.468412991153762,
+ "grad_norm": 0.7108463048934937,
+ "learning_rate": 0.00010319660503339808,
+ "loss": 1.4814,
+ "step": 1468
+ },
+ {
+ "epoch": 1.469413272482886,
+ "grad_norm": 0.6372820734977722,
+ "learning_rate": 0.00010309183228669397,
+ "loss": 1.468,
+ "step": 1469
+ },
+ {
+ "epoch": 1.4704135538120096,
+ "grad_norm": 0.6098326444625854,
+ "learning_rate": 0.00010298705614262532,
+ "loss": 1.6763,
+ "step": 1470
+ },
+ {
+ "epoch": 1.4714138351411334,
+ "grad_norm": 0.6385009288787842,
+ "learning_rate": 0.0001028822767163222,
+ "loss": 1.3058,
+ "step": 1471
+ },
+ {
+ "epoch": 1.4724141164702573,
+ "grad_norm": 0.6848032474517822,
+ "learning_rate": 0.00010277749412291824,
+ "loss": 1.4115,
+ "step": 1472
+ },
+ {
+ "epoch": 1.473414397799381,
+ "grad_norm": 0.7532572746276855,
+ "learning_rate": 0.00010267270847755048,
+ "loss": 1.4219,
+ "step": 1473
+ },
+ {
+ "epoch": 1.4744146791285049,
+ "grad_norm": 0.7336605787277222,
+ "learning_rate": 0.00010256791989535952,
+ "loss": 1.4092,
+ "step": 1474
+ },
+ {
+ "epoch": 1.4754149604576288,
+ "grad_norm": 0.6300507187843323,
+ "learning_rate": 0.00010246312849148899,
+ "loss": 1.2911,
+ "step": 1475
+ },
+ {
+ "epoch": 1.4764152417867527,
+ "grad_norm": 0.7114218473434448,
+ "learning_rate": 0.00010235833438108571,
+ "loss": 1.5038,
+ "step": 1476
+ },
+ {
+ "epoch": 1.4774155231158763,
+ "grad_norm": 0.7215398550033569,
+ "learning_rate": 0.00010225353767929944,
+ "loss": 1.4919,
+ "step": 1477
+ },
+ {
+ "epoch": 1.4784158044450002,
+ "grad_norm": 0.6189507246017456,
+ "learning_rate": 0.00010214873850128282,
+ "loss": 1.2092,
+ "step": 1478
+ },
+ {
+ "epoch": 1.4794160857741239,
+ "grad_norm": 0.5806283950805664,
+ "learning_rate": 0.00010204393696219117,
+ "loss": 1.2862,
+ "step": 1479
+ },
+ {
+ "epoch": 1.4804163671032478,
+ "grad_norm": 0.7068900465965271,
+ "learning_rate": 0.00010193913317718244,
+ "loss": 1.319,
+ "step": 1480
+ },
+ {
+ "epoch": 1.4814166484323716,
+ "grad_norm": 0.749792218208313,
+ "learning_rate": 0.00010183432726141706,
+ "loss": 1.3661,
+ "step": 1481
+ },
+ {
+ "epoch": 1.4824169297614955,
+ "grad_norm": 0.7314055562019348,
+ "learning_rate": 0.00010172951933005775,
+ "loss": 1.5695,
+ "step": 1482
+ },
+ {
+ "epoch": 1.4834172110906192,
+ "grad_norm": 0.6871920228004456,
+ "learning_rate": 0.00010162470949826948,
+ "loss": 1.3598,
+ "step": 1483
+ },
+ {
+ "epoch": 1.484417492419743,
+ "grad_norm": 0.7139384150505066,
+ "learning_rate": 0.0001015198978812193,
+ "loss": 1.4942,
+ "step": 1484
+ },
+ {
+ "epoch": 1.4854177737488667,
+ "grad_norm": 0.6459400653839111,
+ "learning_rate": 0.00010141508459407623,
+ "loss": 1.3971,
+ "step": 1485
+ },
+ {
+ "epoch": 1.4864180550779906,
+ "grad_norm": 0.8157202005386353,
+ "learning_rate": 0.0001013102697520111,
+ "loss": 1.4679,
+ "step": 1486
+ },
+ {
+ "epoch": 1.4874183364071145,
+ "grad_norm": 0.6978387832641602,
+ "learning_rate": 0.00010120545347019647,
+ "loss": 1.4547,
+ "step": 1487
+ },
+ {
+ "epoch": 1.4884186177362384,
+ "grad_norm": 0.641835629940033,
+ "learning_rate": 0.00010110063586380646,
+ "loss": 1.6611,
+ "step": 1488
+ },
+ {
+ "epoch": 1.489418899065362,
+ "grad_norm": 0.723709225654602,
+ "learning_rate": 0.00010099581704801673,
+ "loss": 1.3994,
+ "step": 1489
+ },
+ {
+ "epoch": 1.490419180394486,
+ "grad_norm": 0.6613619327545166,
+ "learning_rate": 0.00010089099713800414,
+ "loss": 1.5722,
+ "step": 1490
+ },
+ {
+ "epoch": 1.4914194617236098,
+ "grad_norm": 0.6406750082969666,
+ "learning_rate": 0.00010078617624894684,
+ "loss": 1.312,
+ "step": 1491
+ },
+ {
+ "epoch": 1.4924197430527335,
+ "grad_norm": 0.5216225385665894,
+ "learning_rate": 0.000100681354496024,
+ "loss": 1.3552,
+ "step": 1492
+ },
+ {
+ "epoch": 1.4934200243818574,
+ "grad_norm": 0.7549086809158325,
+ "learning_rate": 0.00010057653199441581,
+ "loss": 1.4344,
+ "step": 1493
+ },
+ {
+ "epoch": 1.4944203057109813,
+ "grad_norm": 0.6958007216453552,
+ "learning_rate": 0.00010047170885930324,
+ "loss": 1.254,
+ "step": 1494
+ },
+ {
+ "epoch": 1.4954205870401052,
+ "grad_norm": 0.706564724445343,
+ "learning_rate": 0.00010036688520586788,
+ "loss": 1.4854,
+ "step": 1495
+ },
+ {
+ "epoch": 1.4964208683692288,
+ "grad_norm": 0.6802704930305481,
+ "learning_rate": 0.00010026206114929209,
+ "loss": 1.4631,
+ "step": 1496
+ },
+ {
+ "epoch": 1.4974211496983527,
+ "grad_norm": 0.645449697971344,
+ "learning_rate": 0.00010015723680475846,
+ "loss": 1.5165,
+ "step": 1497
+ },
+ {
+ "epoch": 1.4984214310274764,
+ "grad_norm": 0.5729085206985474,
+ "learning_rate": 0.00010005241228745004,
+ "loss": 1.2683,
+ "step": 1498
+ },
+ {
+ "epoch": 1.4994217123566003,
+ "grad_norm": 0.6592169404029846,
+ "learning_rate": 9.994758771254997e-05,
+ "loss": 1.4722,
+ "step": 1499
+ },
+ {
+ "epoch": 1.5004219936857242,
+ "grad_norm": 0.6299737691879272,
+ "learning_rate": 9.984276319524154e-05,
+ "loss": 1.3664,
+ "step": 1500
+ },
+ {
+ "epoch": 1.501422275014848,
+ "grad_norm": 0.6960833072662354,
+ "learning_rate": 9.973793885070792e-05,
+ "loss": 1.3959,
+ "step": 1501
+ },
+ {
+ "epoch": 1.5024225563439717,
+ "grad_norm": 0.7174006700515747,
+ "learning_rate": 9.963311479413211e-05,
+ "loss": 1.5448,
+ "step": 1502
+ },
+ {
+ "epoch": 1.5034228376730956,
+ "grad_norm": 0.6396325826644897,
+ "learning_rate": 9.95282911406968e-05,
+ "loss": 1.5168,
+ "step": 1503
+ },
+ {
+ "epoch": 1.5044231190022193,
+ "grad_norm": 0.809868335723877,
+ "learning_rate": 9.942346800558421e-05,
+ "loss": 1.4467,
+ "step": 1504
+ },
+ {
+ "epoch": 1.5054234003313431,
+ "grad_norm": 0.6106623411178589,
+ "learning_rate": 9.931864550397601e-05,
+ "loss": 1.519,
+ "step": 1505
+ },
+ {
+ "epoch": 1.506423681660467,
+ "grad_norm": 0.7253887057304382,
+ "learning_rate": 9.921382375105318e-05,
+ "loss": 1.3442,
+ "step": 1506
+ },
+ {
+ "epoch": 1.507423962989591,
+ "grad_norm": 0.6945338249206543,
+ "learning_rate": 9.910900286199587e-05,
+ "loss": 1.5319,
+ "step": 1507
+ },
+ {
+ "epoch": 1.5084242443187148,
+ "grad_norm": 0.682486891746521,
+ "learning_rate": 9.900418295198328e-05,
+ "loss": 1.3558,
+ "step": 1508
+ },
+ {
+ "epoch": 1.5094245256478385,
+ "grad_norm": 0.6952700018882751,
+ "learning_rate": 9.889936413619356e-05,
+ "loss": 1.3194,
+ "step": 1509
+ },
+ {
+ "epoch": 1.5104248069769621,
+ "grad_norm": 0.6775678992271423,
+ "learning_rate": 9.879454652980358e-05,
+ "loss": 1.5936,
+ "step": 1510
+ },
+ {
+ "epoch": 1.511425088306086,
+ "grad_norm": 0.8723187446594238,
+ "learning_rate": 9.868973024798895e-05,
+ "loss": 1.4172,
+ "step": 1511
+ },
+ {
+ "epoch": 1.51242536963521,
+ "grad_norm": 0.8881109952926636,
+ "learning_rate": 9.858491540592382e-05,
+ "loss": 1.4405,
+ "step": 1512
+ },
+ {
+ "epoch": 1.5134256509643338,
+ "grad_norm": 0.6580207347869873,
+ "learning_rate": 9.848010211878074e-05,
+ "loss": 1.6241,
+ "step": 1513
+ },
+ {
+ "epoch": 1.5144259322934577,
+ "grad_norm": 0.6160255074501038,
+ "learning_rate": 9.837529050173052e-05,
+ "loss": 1.4222,
+ "step": 1514
+ },
+ {
+ "epoch": 1.5154262136225813,
+ "grad_norm": 0.7516399025917053,
+ "learning_rate": 9.827048066994225e-05,
+ "loss": 1.2495,
+ "step": 1515
+ },
+ {
+ "epoch": 1.516426494951705,
+ "grad_norm": 0.5907468795776367,
+ "learning_rate": 9.816567273858296e-05,
+ "loss": 1.1453,
+ "step": 1516
+ },
+ {
+ "epoch": 1.517426776280829,
+ "grad_norm": 0.7371746301651001,
+ "learning_rate": 9.806086682281758e-05,
+ "loss": 1.5006,
+ "step": 1517
+ },
+ {
+ "epoch": 1.5184270576099528,
+ "grad_norm": 0.6646453142166138,
+ "learning_rate": 9.795606303780885e-05,
+ "loss": 1.5237,
+ "step": 1518
+ },
+ {
+ "epoch": 1.5194273389390767,
+ "grad_norm": 0.7043606638908386,
+ "learning_rate": 9.785126149871722e-05,
+ "loss": 1.3339,
+ "step": 1519
+ },
+ {
+ "epoch": 1.5204276202682006,
+ "grad_norm": 0.8110997676849365,
+ "learning_rate": 9.77464623207006e-05,
+ "loss": 1.5193,
+ "step": 1520
+ },
+ {
+ "epoch": 1.5214279015973242,
+ "grad_norm": 0.6463339328765869,
+ "learning_rate": 9.764166561891432e-05,
+ "loss": 1.412,
+ "step": 1521
+ },
+ {
+ "epoch": 1.522428182926448,
+ "grad_norm": 0.7538262605667114,
+ "learning_rate": 9.753687150851102e-05,
+ "loss": 1.5326,
+ "step": 1522
+ },
+ {
+ "epoch": 1.5234284642555718,
+ "grad_norm": 0.7361929416656494,
+ "learning_rate": 9.74320801046405e-05,
+ "loss": 1.4104,
+ "step": 1523
+ },
+ {
+ "epoch": 1.5244287455846957,
+ "grad_norm": 0.7260544896125793,
+ "learning_rate": 9.732729152244953e-05,
+ "loss": 1.6037,
+ "step": 1524
+ },
+ {
+ "epoch": 1.5254290269138195,
+ "grad_norm": 0.6636849641799927,
+ "learning_rate": 9.722250587708181e-05,
+ "loss": 1.2908,
+ "step": 1525
+ },
+ {
+ "epoch": 1.5264293082429434,
+ "grad_norm": 0.7487931251525879,
+ "learning_rate": 9.711772328367784e-05,
+ "loss": 1.5143,
+ "step": 1526
+ },
+ {
+ "epoch": 1.527429589572067,
+ "grad_norm": 0.7224540710449219,
+ "learning_rate": 9.70129438573747e-05,
+ "loss": 1.4773,
+ "step": 1527
+ },
+ {
+ "epoch": 1.528429870901191,
+ "grad_norm": 0.6425575017929077,
+ "learning_rate": 9.690816771330608e-05,
+ "loss": 1.329,
+ "step": 1528
+ },
+ {
+ "epoch": 1.5294301522303146,
+ "grad_norm": 0.6902957558631897,
+ "learning_rate": 9.680339496660192e-05,
+ "loss": 1.3085,
+ "step": 1529
+ },
+ {
+ "epoch": 1.5304304335594385,
+ "grad_norm": 0.6722397804260254,
+ "learning_rate": 9.669862573238863e-05,
+ "loss": 1.5163,
+ "step": 1530
+ },
+ {
+ "epoch": 1.5314307148885624,
+ "grad_norm": 0.7586985230445862,
+ "learning_rate": 9.659386012578863e-05,
+ "loss": 1.3072,
+ "step": 1531
+ },
+ {
+ "epoch": 1.5324309962176863,
+ "grad_norm": 0.7313751578330994,
+ "learning_rate": 9.648909826192033e-05,
+ "loss": 1.4071,
+ "step": 1532
+ },
+ {
+ "epoch": 1.5334312775468102,
+ "grad_norm": 0.6215599775314331,
+ "learning_rate": 9.63843402558981e-05,
+ "loss": 1.4255,
+ "step": 1533
+ },
+ {
+ "epoch": 1.5344315588759339,
+ "grad_norm": 0.7188824415206909,
+ "learning_rate": 9.627958622283203e-05,
+ "loss": 1.4148,
+ "step": 1534
+ },
+ {
+ "epoch": 1.5354318402050575,
+ "grad_norm": 0.6444137692451477,
+ "learning_rate": 9.617483627782788e-05,
+ "loss": 1.6461,
+ "step": 1535
+ },
+ {
+ "epoch": 1.5364321215341814,
+ "grad_norm": 0.7150428295135498,
+ "learning_rate": 9.607009053598689e-05,
+ "loss": 1.5799,
+ "step": 1536
+ },
+ {
+ "epoch": 1.5374324028633053,
+ "grad_norm": 0.6654619574546814,
+ "learning_rate": 9.596534911240566e-05,
+ "loss": 1.3762,
+ "step": 1537
+ },
+ {
+ "epoch": 1.5384326841924292,
+ "grad_norm": 0.5966542363166809,
+ "learning_rate": 9.58606121221761e-05,
+ "loss": 1.4305,
+ "step": 1538
+ },
+ {
+ "epoch": 1.539432965521553,
+ "grad_norm": 0.6061896085739136,
+ "learning_rate": 9.57558796803852e-05,
+ "loss": 1.2492,
+ "step": 1539
+ },
+ {
+ "epoch": 1.5404332468506767,
+ "grad_norm": 0.7098972797393799,
+ "learning_rate": 9.565115190211497e-05,
+ "loss": 1.3917,
+ "step": 1540
+ },
+ {
+ "epoch": 1.5414335281798006,
+ "grad_norm": 0.7661631107330322,
+ "learning_rate": 9.554642890244233e-05,
+ "loss": 1.5048,
+ "step": 1541
+ },
+ {
+ "epoch": 1.5424338095089243,
+ "grad_norm": 2.214449405670166,
+ "learning_rate": 9.54417107964389e-05,
+ "loss": 1.3739,
+ "step": 1542
+ },
+ {
+ "epoch": 1.5434340908380482,
+ "grad_norm": 0.6061079502105713,
+ "learning_rate": 9.533699769917092e-05,
+ "loss": 1.4466,
+ "step": 1543
+ },
+ {
+ "epoch": 1.544434372167172,
+ "grad_norm": 0.76081383228302,
+ "learning_rate": 9.523228972569917e-05,
+ "loss": 1.2733,
+ "step": 1544
+ },
+ {
+ "epoch": 1.545434653496296,
+ "grad_norm": 0.6445167064666748,
+ "learning_rate": 9.512758699107879e-05,
+ "loss": 1.404,
+ "step": 1545
+ },
+ {
+ "epoch": 1.5464349348254196,
+ "grad_norm": 0.53884357213974,
+ "learning_rate": 9.502288961035912e-05,
+ "loss": 1.2348,
+ "step": 1546
+ },
+ {
+ "epoch": 1.5474352161545435,
+ "grad_norm": 0.589690625667572,
+ "learning_rate": 9.491819769858366e-05,
+ "loss": 1.2912,
+ "step": 1547
+ },
+ {
+ "epoch": 1.5484354974836672,
+ "grad_norm": 0.6259596347808838,
+ "learning_rate": 9.48135113707899e-05,
+ "loss": 1.4254,
+ "step": 1548
+ },
+ {
+ "epoch": 1.549435778812791,
+ "grad_norm": 0.5648382306098938,
+ "learning_rate": 9.470883074200916e-05,
+ "loss": 1.205,
+ "step": 1549
+ },
+ {
+ "epoch": 1.550436060141915,
+ "grad_norm": 0.6659985780715942,
+ "learning_rate": 9.460415592726653e-05,
+ "loss": 1.2726,
+ "step": 1550
+ },
+ {
+ "epoch": 1.5514363414710388,
+ "grad_norm": 0.6457205414772034,
+ "learning_rate": 9.449948704158071e-05,
+ "loss": 1.4254,
+ "step": 1551
+ },
+ {
+ "epoch": 1.5524366228001627,
+ "grad_norm": 0.753474235534668,
+ "learning_rate": 9.439482419996384e-05,
+ "loss": 1.3446,
+ "step": 1552
+ },
+ {
+ "epoch": 1.5534369041292864,
+ "grad_norm": 0.6353628039360046,
+ "learning_rate": 9.42901675174215e-05,
+ "loss": 1.2947,
+ "step": 1553
+ },
+ {
+ "epoch": 1.55443718545841,
+ "grad_norm": 0.5484879016876221,
+ "learning_rate": 9.418551710895243e-05,
+ "loss": 1.3678,
+ "step": 1554
+ },
+ {
+ "epoch": 1.555437466787534,
+ "grad_norm": 0.564643919467926,
+ "learning_rate": 9.408087308954853e-05,
+ "loss": 1.2557,
+ "step": 1555
+ },
+ {
+ "epoch": 1.5564377481166578,
+ "grad_norm": 0.6570972800254822,
+ "learning_rate": 9.397623557419461e-05,
+ "loss": 1.3769,
+ "step": 1556
+ },
+ {
+ "epoch": 1.5574380294457817,
+ "grad_norm": 0.5943097472190857,
+ "learning_rate": 9.38716046778684e-05,
+ "loss": 1.2613,
+ "step": 1557
+ },
+ {
+ "epoch": 1.5584383107749056,
+ "grad_norm": 0.6349796056747437,
+ "learning_rate": 9.37669805155403e-05,
+ "loss": 1.4075,
+ "step": 1558
+ },
+ {
+ "epoch": 1.5594385921040292,
+ "grad_norm": 0.6645040512084961,
+ "learning_rate": 9.366236320217339e-05,
+ "loss": 1.6224,
+ "step": 1559
+ },
+ {
+ "epoch": 1.5604388734331531,
+ "grad_norm": 0.7049742937088013,
+ "learning_rate": 9.355775285272318e-05,
+ "loss": 1.3948,
+ "step": 1560
+ },
+ {
+ "epoch": 1.5614391547622768,
+ "grad_norm": 0.7328057885169983,
+ "learning_rate": 9.34531495821375e-05,
+ "loss": 1.4479,
+ "step": 1561
+ },
+ {
+ "epoch": 1.5624394360914007,
+ "grad_norm": 0.6629959344863892,
+ "learning_rate": 9.334855350535645e-05,
+ "loss": 1.4399,
+ "step": 1562
+ },
+ {
+ "epoch": 1.5634397174205246,
+ "grad_norm": 0.5964148640632629,
+ "learning_rate": 9.324396473731217e-05,
+ "loss": 1.3064,
+ "step": 1563
+ },
+ {
+ "epoch": 1.5644399987496485,
+ "grad_norm": 0.6908231377601624,
+ "learning_rate": 9.313938339292883e-05,
+ "loss": 1.234,
+ "step": 1564
+ },
+ {
+ "epoch": 1.5654402800787721,
+ "grad_norm": 0.6026841402053833,
+ "learning_rate": 9.303480958712239e-05,
+ "loss": 1.2906,
+ "step": 1565
+ },
+ {
+ "epoch": 1.566440561407896,
+ "grad_norm": 0.5934796333312988,
+ "learning_rate": 9.293024343480055e-05,
+ "loss": 1.1448,
+ "step": 1566
+ },
+ {
+ "epoch": 1.5674408427370197,
+ "grad_norm": 0.6869467496871948,
+ "learning_rate": 9.282568505086261e-05,
+ "loss": 1.283,
+ "step": 1567
+ },
+ {
+ "epoch": 1.5684411240661436,
+ "grad_norm": 0.6558713912963867,
+ "learning_rate": 9.272113455019935e-05,
+ "loss": 1.502,
+ "step": 1568
+ },
+ {
+ "epoch": 1.5694414053952674,
+ "grad_norm": 0.6627963781356812,
+ "learning_rate": 9.261659204769284e-05,
+ "loss": 1.2985,
+ "step": 1569
+ },
+ {
+ "epoch": 1.5704416867243913,
+ "grad_norm": 0.7012712359428406,
+ "learning_rate": 9.251205765821636e-05,
+ "loss": 1.3899,
+ "step": 1570
+ },
+ {
+ "epoch": 1.5714419680535152,
+ "grad_norm": 0.6767538785934448,
+ "learning_rate": 9.240753149663433e-05,
+ "loss": 1.4869,
+ "step": 1571
+ },
+ {
+ "epoch": 1.5724422493826389,
+ "grad_norm": 0.6274527311325073,
+ "learning_rate": 9.230301367780208e-05,
+ "loss": 1.4491,
+ "step": 1572
+ },
+ {
+ "epoch": 1.5734425307117625,
+ "grad_norm": 0.6079627275466919,
+ "learning_rate": 9.219850431656579e-05,
+ "loss": 1.247,
+ "step": 1573
+ },
+ {
+ "epoch": 1.5744428120408864,
+ "grad_norm": 0.9947478175163269,
+ "learning_rate": 9.209400352776237e-05,
+ "loss": 1.3117,
+ "step": 1574
+ },
+ {
+ "epoch": 1.5754430933700103,
+ "grad_norm": 0.6441598534584045,
+ "learning_rate": 9.198951142621929e-05,
+ "loss": 1.2931,
+ "step": 1575
+ },
+ {
+ "epoch": 1.5764433746991342,
+ "grad_norm": 0.5995433926582336,
+ "learning_rate": 9.188502812675446e-05,
+ "loss": 1.3293,
+ "step": 1576
+ },
+ {
+ "epoch": 1.577443656028258,
+ "grad_norm": 0.5997470617294312,
+ "learning_rate": 9.178055374417612e-05,
+ "loss": 1.2542,
+ "step": 1577
+ },
+ {
+ "epoch": 1.5784439373573818,
+ "grad_norm": 0.7376891374588013,
+ "learning_rate": 9.167608839328272e-05,
+ "loss": 1.4369,
+ "step": 1578
+ },
+ {
+ "epoch": 1.5794442186865054,
+ "grad_norm": 0.7353281378746033,
+ "learning_rate": 9.15716321888628e-05,
+ "loss": 1.6255,
+ "step": 1579
+ },
+ {
+ "epoch": 1.5804445000156293,
+ "grad_norm": 0.6899515986442566,
+ "learning_rate": 9.146718524569487e-05,
+ "loss": 1.2246,
+ "step": 1580
+ },
+ {
+ "epoch": 1.5814447813447532,
+ "grad_norm": 0.6453947424888611,
+ "learning_rate": 9.136274767854716e-05,
+ "loss": 1.3967,
+ "step": 1581
+ },
+ {
+ "epoch": 1.582445062673877,
+ "grad_norm": 0.7168171405792236,
+ "learning_rate": 9.125831960217774e-05,
+ "loss": 1.3952,
+ "step": 1582
+ },
+ {
+ "epoch": 1.583445344003001,
+ "grad_norm": 0.6337130665779114,
+ "learning_rate": 9.115390113133414e-05,
+ "loss": 1.1972,
+ "step": 1583
+ },
+ {
+ "epoch": 1.5844456253321246,
+ "grad_norm": 0.7215299606323242,
+ "learning_rate": 9.104949238075336e-05,
+ "loss": 1.3696,
+ "step": 1584
+ },
+ {
+ "epoch": 1.5854459066612485,
+ "grad_norm": 0.7228485941886902,
+ "learning_rate": 9.094509346516178e-05,
+ "loss": 1.3858,
+ "step": 1585
+ },
+ {
+ "epoch": 1.5864461879903722,
+ "grad_norm": 0.6178514957427979,
+ "learning_rate": 9.084070449927488e-05,
+ "loss": 1.1581,
+ "step": 1586
+ },
+ {
+ "epoch": 1.587446469319496,
+ "grad_norm": 0.5726553201675415,
+ "learning_rate": 9.07363255977973e-05,
+ "loss": 1.265,
+ "step": 1587
+ },
+ {
+ "epoch": 1.58844675064862,
+ "grad_norm": 0.6116858124732971,
+ "learning_rate": 9.063195687542249e-05,
+ "loss": 1.3056,
+ "step": 1588
+ },
+ {
+ "epoch": 1.5894470319777438,
+ "grad_norm": 0.6042388677597046,
+ "learning_rate": 9.052759844683295e-05,
+ "loss": 1.3403,
+ "step": 1589
+ },
+ {
+ "epoch": 1.5904473133068675,
+ "grad_norm": 0.6811801791191101,
+ "learning_rate": 9.042325042669961e-05,
+ "loss": 1.4842,
+ "step": 1590
+ },
+ {
+ "epoch": 1.5914475946359914,
+ "grad_norm": 0.591401994228363,
+ "learning_rate": 9.03189129296821e-05,
+ "loss": 0.975,
+ "step": 1591
+ },
+ {
+ "epoch": 1.592447875965115,
+ "grad_norm": 0.5907956957817078,
+ "learning_rate": 9.021458607042845e-05,
+ "loss": 1.2758,
+ "step": 1592
+ },
+ {
+ "epoch": 1.593448157294239,
+ "grad_norm": 0.7272189259529114,
+ "learning_rate": 9.011026996357503e-05,
+ "loss": 1.3661,
+ "step": 1593
+ },
+ {
+ "epoch": 1.5944484386233628,
+ "grad_norm": 0.652340829372406,
+ "learning_rate": 9.000596472374637e-05,
+ "loss": 1.3547,
+ "step": 1594
+ },
+ {
+ "epoch": 1.5954487199524867,
+ "grad_norm": 0.8212108016014099,
+ "learning_rate": 8.990167046555504e-05,
+ "loss": 1.2207,
+ "step": 1595
+ },
+ {
+ "epoch": 1.5964490012816106,
+ "grad_norm": 0.6061079502105713,
+ "learning_rate": 8.97973873036016e-05,
+ "loss": 1.2555,
+ "step": 1596
+ },
+ {
+ "epoch": 1.5974492826107343,
+ "grad_norm": 0.5761566162109375,
+ "learning_rate": 8.969311535247438e-05,
+ "loss": 1.3051,
+ "step": 1597
+ },
+ {
+ "epoch": 1.598449563939858,
+ "grad_norm": 0.6714027523994446,
+ "learning_rate": 8.958885472674939e-05,
+ "loss": 1.4451,
+ "step": 1598
+ },
+ {
+ "epoch": 1.5994498452689818,
+ "grad_norm": 0.6701240539550781,
+ "learning_rate": 8.948460554099018e-05,
+ "loss": 1.4353,
+ "step": 1599
+ },
+ {
+ "epoch": 1.6004501265981057,
+ "grad_norm": 0.7223709225654602,
+ "learning_rate": 8.93803679097478e-05,
+ "loss": 1.4029,
+ "step": 1600
+ },
+ {
+ "epoch": 1.6014504079272296,
+ "grad_norm": 0.6414337158203125,
+ "learning_rate": 8.927614194756052e-05,
+ "loss": 1.304,
+ "step": 1601
+ },
+ {
+ "epoch": 1.6024506892563535,
+ "grad_norm": 0.6110413670539856,
+ "learning_rate": 8.917192776895382e-05,
+ "loss": 1.2504,
+ "step": 1602
+ },
+ {
+ "epoch": 1.6034509705854771,
+ "grad_norm": 0.6857700943946838,
+ "learning_rate": 8.906772548844026e-05,
+ "loss": 1.4735,
+ "step": 1603
+ },
+ {
+ "epoch": 1.604451251914601,
+ "grad_norm": 0.7300008535385132,
+ "learning_rate": 8.896353522051928e-05,
+ "loss": 1.753,
+ "step": 1604
+ },
+ {
+ "epoch": 1.6054515332437247,
+ "grad_norm": 0.6020368933677673,
+ "learning_rate": 8.885935707967716e-05,
+ "loss": 1.2869,
+ "step": 1605
+ },
+ {
+ "epoch": 1.6064518145728486,
+ "grad_norm": 0.5244629979133606,
+ "learning_rate": 8.875519118038684e-05,
+ "loss": 1.432,
+ "step": 1606
+ },
+ {
+ "epoch": 1.6074520959019725,
+ "grad_norm": 0.6224693059921265,
+ "learning_rate": 8.865103763710777e-05,
+ "loss": 1.6618,
+ "step": 1607
+ },
+ {
+ "epoch": 1.6084523772310964,
+ "grad_norm": 0.6111294627189636,
+ "learning_rate": 8.854689656428591e-05,
+ "loss": 1.4098,
+ "step": 1608
+ },
+ {
+ "epoch": 1.60945265856022,
+ "grad_norm": 0.6179168820381165,
+ "learning_rate": 8.844276807635343e-05,
+ "loss": 1.1749,
+ "step": 1609
+ },
+ {
+ "epoch": 1.610452939889344,
+ "grad_norm": 0.658416748046875,
+ "learning_rate": 8.833865228772871e-05,
+ "loss": 1.3686,
+ "step": 1610
+ },
+ {
+ "epoch": 1.6114532212184676,
+ "grad_norm": 0.5781399607658386,
+ "learning_rate": 8.823454931281616e-05,
+ "loss": 1.3618,
+ "step": 1611
+ },
+ {
+ "epoch": 1.6124535025475915,
+ "grad_norm": 0.6480880975723267,
+ "learning_rate": 8.813045926600615e-05,
+ "loss": 1.4162,
+ "step": 1612
+ },
+ {
+ "epoch": 1.6134537838767153,
+ "grad_norm": 0.5510106682777405,
+ "learning_rate": 8.802638226167479e-05,
+ "loss": 1.1625,
+ "step": 1613
+ },
+ {
+ "epoch": 1.6144540652058392,
+ "grad_norm": 0.6937603950500488,
+ "learning_rate": 8.792231841418391e-05,
+ "loss": 1.462,
+ "step": 1614
+ },
+ {
+ "epoch": 1.6154543465349631,
+ "grad_norm": 0.8652899265289307,
+ "learning_rate": 8.781826783788084e-05,
+ "loss": 1.389,
+ "step": 1615
+ },
+ {
+ "epoch": 1.6164546278640868,
+ "grad_norm": 0.6258351802825928,
+ "learning_rate": 8.771423064709837e-05,
+ "loss": 1.322,
+ "step": 1616
+ },
+ {
+ "epoch": 1.6174549091932104,
+ "grad_norm": 0.6774043440818787,
+ "learning_rate": 8.76102069561545e-05,
+ "loss": 1.3344,
+ "step": 1617
+ },
+ {
+ "epoch": 1.6184551905223343,
+ "grad_norm": 0.6406411528587341,
+ "learning_rate": 8.750619687935251e-05,
+ "loss": 1.3929,
+ "step": 1618
+ },
+ {
+ "epoch": 1.6194554718514582,
+ "grad_norm": 0.6380543112754822,
+ "learning_rate": 8.740220053098067e-05,
+ "loss": 1.413,
+ "step": 1619
+ },
+ {
+ "epoch": 1.620455753180582,
+ "grad_norm": 0.7143612504005432,
+ "learning_rate": 8.729821802531212e-05,
+ "loss": 1.541,
+ "step": 1620
+ },
+ {
+ "epoch": 1.621456034509706,
+ "grad_norm": 0.5897488594055176,
+ "learning_rate": 8.719424947660487e-05,
+ "loss": 1.2968,
+ "step": 1621
+ },
+ {
+ "epoch": 1.6224563158388297,
+ "grad_norm": 0.6275039911270142,
+ "learning_rate": 8.70902949991015e-05,
+ "loss": 1.4192,
+ "step": 1622
+ },
+ {
+ "epoch": 1.6234565971679535,
+ "grad_norm": 0.7218581438064575,
+ "learning_rate": 8.698635470702923e-05,
+ "loss": 1.41,
+ "step": 1623
+ },
+ {
+ "epoch": 1.6244568784970772,
+ "grad_norm": 0.7635208964347839,
+ "learning_rate": 8.688242871459963e-05,
+ "loss": 1.3006,
+ "step": 1624
+ },
+ {
+ "epoch": 1.625457159826201,
+ "grad_norm": 0.746904730796814,
+ "learning_rate": 8.677851713600855e-05,
+ "loss": 1.7362,
+ "step": 1625
+ },
+ {
+ "epoch": 1.626457441155325,
+ "grad_norm": 0.6612946391105652,
+ "learning_rate": 8.667462008543603e-05,
+ "loss": 1.6304,
+ "step": 1626
+ },
+ {
+ "epoch": 1.6274577224844489,
+ "grad_norm": 0.6576234698295593,
+ "learning_rate": 8.657073767704615e-05,
+ "loss": 1.4814,
+ "step": 1627
+ },
+ {
+ "epoch": 1.6284580038135725,
+ "grad_norm": 0.6360548138618469,
+ "learning_rate": 8.646687002498692e-05,
+ "loss": 1.4134,
+ "step": 1628
+ },
+ {
+ "epoch": 1.6294582851426964,
+ "grad_norm": 0.6985300183296204,
+ "learning_rate": 8.636301724339004e-05,
+ "loss": 1.4916,
+ "step": 1629
+ },
+ {
+ "epoch": 1.63045856647182,
+ "grad_norm": 0.5798565149307251,
+ "learning_rate": 8.625917944637096e-05,
+ "loss": 1.4304,
+ "step": 1630
+ },
+ {
+ "epoch": 1.631458847800944,
+ "grad_norm": 0.6414662599563599,
+ "learning_rate": 8.615535674802865e-05,
+ "loss": 1.4268,
+ "step": 1631
+ },
+ {
+ "epoch": 1.6324591291300679,
+ "grad_norm": 0.6145825386047363,
+ "learning_rate": 8.605154926244543e-05,
+ "loss": 1.1465,
+ "step": 1632
+ },
+ {
+ "epoch": 1.6334594104591917,
+ "grad_norm": 0.6959055662155151,
+ "learning_rate": 8.594775710368704e-05,
+ "loss": 1.4171,
+ "step": 1633
+ },
+ {
+ "epoch": 1.6344596917883156,
+ "grad_norm": 0.6742627024650574,
+ "learning_rate": 8.584398038580226e-05,
+ "loss": 1.4406,
+ "step": 1634
+ },
+ {
+ "epoch": 1.6354599731174393,
+ "grad_norm": 0.6703431606292725,
+ "learning_rate": 8.574021922282292e-05,
+ "loss": 1.423,
+ "step": 1635
+ },
+ {
+ "epoch": 1.636460254446563,
+ "grad_norm": 0.6874369382858276,
+ "learning_rate": 8.563647372876378e-05,
+ "loss": 1.3816,
+ "step": 1636
+ },
+ {
+ "epoch": 1.6374605357756868,
+ "grad_norm": 0.6975083351135254,
+ "learning_rate": 8.553274401762237e-05,
+ "loss": 1.3082,
+ "step": 1637
+ },
+ {
+ "epoch": 1.6384608171048107,
+ "grad_norm": 0.6560392379760742,
+ "learning_rate": 8.542903020337887e-05,
+ "loss": 1.4106,
+ "step": 1638
+ },
+ {
+ "epoch": 1.6394610984339346,
+ "grad_norm": 0.7531464099884033,
+ "learning_rate": 8.532533239999602e-05,
+ "loss": 1.1798,
+ "step": 1639
+ },
+ {
+ "epoch": 1.6404613797630585,
+ "grad_norm": 0.683595597743988,
+ "learning_rate": 8.522165072141897e-05,
+ "loss": 1.2502,
+ "step": 1640
+ },
+ {
+ "epoch": 1.6414616610921822,
+ "grad_norm": 0.6005716323852539,
+ "learning_rate": 8.511798528157512e-05,
+ "loss": 1.1569,
+ "step": 1641
+ },
+ {
+ "epoch": 1.6424619424213058,
+ "grad_norm": 0.5878857374191284,
+ "learning_rate": 8.501433619437403e-05,
+ "loss": 1.1993,
+ "step": 1642
+ },
+ {
+ "epoch": 1.6434622237504297,
+ "grad_norm": 0.6806159615516663,
+ "learning_rate": 8.49107035737073e-05,
+ "loss": 1.48,
+ "step": 1643
+ },
+ {
+ "epoch": 1.6444625050795536,
+ "grad_norm": 0.6241198182106018,
+ "learning_rate": 8.480708753344846e-05,
+ "loss": 1.5493,
+ "step": 1644
+ },
+ {
+ "epoch": 1.6454627864086775,
+ "grad_norm": 0.6669902205467224,
+ "learning_rate": 8.470348818745278e-05,
+ "loss": 1.3893,
+ "step": 1645
+ },
+ {
+ "epoch": 1.6464630677378014,
+ "grad_norm": 0.7545666694641113,
+ "learning_rate": 8.459990564955721e-05,
+ "loss": 1.2774,
+ "step": 1646
+ },
+ {
+ "epoch": 1.647463349066925,
+ "grad_norm": 0.7358554601669312,
+ "learning_rate": 8.449634003358022e-05,
+ "loss": 1.4932,
+ "step": 1647
+ },
+ {
+ "epoch": 1.648463630396049,
+ "grad_norm": 0.5833718180656433,
+ "learning_rate": 8.43927914533217e-05,
+ "loss": 1.447,
+ "step": 1648
+ },
+ {
+ "epoch": 1.6494639117251726,
+ "grad_norm": 0.7357178330421448,
+ "learning_rate": 8.428926002256283e-05,
+ "loss": 1.5369,
+ "step": 1649
+ },
+ {
+ "epoch": 1.6504641930542965,
+ "grad_norm": 0.6580341458320618,
+ "learning_rate": 8.418574585506591e-05,
+ "loss": 1.5551,
+ "step": 1650
+ },
+ {
+ "epoch": 1.6514644743834204,
+ "grad_norm": 0.637784481048584,
+ "learning_rate": 8.408224906457429e-05,
+ "loss": 1.3233,
+ "step": 1651
+ },
+ {
+ "epoch": 1.6524647557125443,
+ "grad_norm": 0.6412131190299988,
+ "learning_rate": 8.397876976481224e-05,
+ "loss": 1.15,
+ "step": 1652
+ },
+ {
+ "epoch": 1.653465037041668,
+ "grad_norm": 0.6500189900398254,
+ "learning_rate": 8.387530806948476e-05,
+ "loss": 1.3027,
+ "step": 1653
+ },
+ {
+ "epoch": 1.6544653183707918,
+ "grad_norm": 0.6529775857925415,
+ "learning_rate": 8.37718640922776e-05,
+ "loss": 1.4015,
+ "step": 1654
+ },
+ {
+ "epoch": 1.6554655996999155,
+ "grad_norm": 0.5686854720115662,
+ "learning_rate": 8.366843794685695e-05,
+ "loss": 1.3276,
+ "step": 1655
+ },
+ {
+ "epoch": 1.6564658810290394,
+ "grad_norm": 0.6397770643234253,
+ "learning_rate": 8.356502974686941e-05,
+ "loss": 1.2586,
+ "step": 1656
+ },
+ {
+ "epoch": 1.6574661623581632,
+ "grad_norm": 0.688079297542572,
+ "learning_rate": 8.346163960594193e-05,
+ "loss": 1.4179,
+ "step": 1657
+ },
+ {
+ "epoch": 1.6584664436872871,
+ "grad_norm": 0.8436989784240723,
+ "learning_rate": 8.335826763768156e-05,
+ "loss": 1.3527,
+ "step": 1658
+ },
+ {
+ "epoch": 1.659466725016411,
+ "grad_norm": 0.6351317763328552,
+ "learning_rate": 8.325491395567541e-05,
+ "loss": 1.2692,
+ "step": 1659
+ },
+ {
+ "epoch": 1.6604670063455347,
+ "grad_norm": 0.6486929059028625,
+ "learning_rate": 8.315157867349046e-05,
+ "loss": 1.3388,
+ "step": 1660
+ },
+ {
+ "epoch": 1.6614672876746583,
+ "grad_norm": 0.6507102847099304,
+ "learning_rate": 8.30482619046735e-05,
+ "loss": 1.3335,
+ "step": 1661
+ },
+ {
+ "epoch": 1.6624675690037822,
+ "grad_norm": 0.5884740948677063,
+ "learning_rate": 8.294496376275104e-05,
+ "loss": 1.3208,
+ "step": 1662
+ },
+ {
+ "epoch": 1.6634678503329061,
+ "grad_norm": 0.6564494371414185,
+ "learning_rate": 8.284168436122898e-05,
+ "loss": 1.164,
+ "step": 1663
+ },
+ {
+ "epoch": 1.66446813166203,
+ "grad_norm": 0.738129198551178,
+ "learning_rate": 8.273842381359273e-05,
+ "loss": 1.5263,
+ "step": 1664
+ },
+ {
+ "epoch": 1.665468412991154,
+ "grad_norm": 0.6807828545570374,
+ "learning_rate": 8.263518223330697e-05,
+ "loss": 1.3342,
+ "step": 1665
+ },
+ {
+ "epoch": 1.6664686943202776,
+ "grad_norm": 0.6691699624061584,
+ "learning_rate": 8.253195973381552e-05,
+ "loss": 1.3643,
+ "step": 1666
+ },
+ {
+ "epoch": 1.6674689756494014,
+ "grad_norm": 0.7294644117355347,
+ "learning_rate": 8.242875642854121e-05,
+ "loss": 1.3019,
+ "step": 1667
+ },
+ {
+ "epoch": 1.668469256978525,
+ "grad_norm": 0.9484224915504456,
+ "learning_rate": 8.232557243088585e-05,
+ "loss": 1.568,
+ "step": 1668
+ },
+ {
+ "epoch": 1.669469538307649,
+ "grad_norm": 0.6717697978019714,
+ "learning_rate": 8.222240785422996e-05,
+ "loss": 1.4427,
+ "step": 1669
+ },
+ {
+ "epoch": 1.6704698196367729,
+ "grad_norm": 0.6472289562225342,
+ "learning_rate": 8.211926281193277e-05,
+ "loss": 1.3165,
+ "step": 1670
+ },
+ {
+ "epoch": 1.6714701009658968,
+ "grad_norm": 0.6698246598243713,
+ "learning_rate": 8.201613741733203e-05,
+ "loss": 1.5418,
+ "step": 1671
+ },
+ {
+ "epoch": 1.6724703822950204,
+ "grad_norm": 0.6673927307128906,
+ "learning_rate": 8.191303178374389e-05,
+ "loss": 1.2539,
+ "step": 1672
+ },
+ {
+ "epoch": 1.6734706636241443,
+ "grad_norm": 0.659993588924408,
+ "learning_rate": 8.180994602446279e-05,
+ "loss": 1.4708,
+ "step": 1673
+ },
+ {
+ "epoch": 1.674470944953268,
+ "grad_norm": 0.6471976041793823,
+ "learning_rate": 8.170688025276134e-05,
+ "loss": 1.2014,
+ "step": 1674
+ },
+ {
+ "epoch": 1.6754712262823919,
+ "grad_norm": 0.6816028952598572,
+ "learning_rate": 8.160383458189022e-05,
+ "loss": 1.4187,
+ "step": 1675
+ },
+ {
+ "epoch": 1.6764715076115158,
+ "grad_norm": 0.747825562953949,
+ "learning_rate": 8.15008091250779e-05,
+ "loss": 1.4394,
+ "step": 1676
+ },
+ {
+ "epoch": 1.6774717889406396,
+ "grad_norm": 0.6439304947853088,
+ "learning_rate": 8.13978039955308e-05,
+ "loss": 1.3567,
+ "step": 1677
+ },
+ {
+ "epoch": 1.6784720702697635,
+ "grad_norm": 0.7007876634597778,
+ "learning_rate": 8.12948193064329e-05,
+ "loss": 1.3913,
+ "step": 1678
+ },
+ {
+ "epoch": 1.6794723515988872,
+ "grad_norm": 0.6919410228729248,
+ "learning_rate": 8.119185517094578e-05,
+ "loss": 1.217,
+ "step": 1679
+ },
+ {
+ "epoch": 1.6804726329280109,
+ "grad_norm": 0.59043288230896,
+ "learning_rate": 8.108891170220836e-05,
+ "loss": 1.2202,
+ "step": 1680
+ },
+ {
+ "epoch": 1.6814729142571347,
+ "grad_norm": 0.6019158363342285,
+ "learning_rate": 8.098598901333692e-05,
+ "loss": 1.2729,
+ "step": 1681
+ },
+ {
+ "epoch": 1.6824731955862586,
+ "grad_norm": 0.7911222577095032,
+ "learning_rate": 8.088308721742491e-05,
+ "loss": 1.3911,
+ "step": 1682
+ },
+ {
+ "epoch": 1.6834734769153825,
+ "grad_norm": 0.6759652495384216,
+ "learning_rate": 8.078020642754274e-05,
+ "loss": 1.3394,
+ "step": 1683
+ },
+ {
+ "epoch": 1.6844737582445064,
+ "grad_norm": 0.6729623675346375,
+ "learning_rate": 8.06773467567378e-05,
+ "loss": 1.2622,
+ "step": 1684
+ },
+ {
+ "epoch": 1.68547403957363,
+ "grad_norm": 0.7148420214653015,
+ "learning_rate": 8.057450831803428e-05,
+ "loss": 1.363,
+ "step": 1685
+ },
+ {
+ "epoch": 1.686474320902754,
+ "grad_norm": 0.6276561617851257,
+ "learning_rate": 8.047169122443302e-05,
+ "loss": 1.4551,
+ "step": 1686
+ },
+ {
+ "epoch": 1.6874746022318776,
+ "grad_norm": 0.6642428636550903,
+ "learning_rate": 8.036889558891142e-05,
+ "loss": 1.5254,
+ "step": 1687
+ },
+ {
+ "epoch": 1.6884748835610015,
+ "grad_norm": 0.6483539342880249,
+ "learning_rate": 8.026612152442329e-05,
+ "loss": 1.4619,
+ "step": 1688
+ },
+ {
+ "epoch": 1.6894751648901254,
+ "grad_norm": 0.799802303314209,
+ "learning_rate": 8.016336914389874e-05,
+ "loss": 1.5548,
+ "step": 1689
+ },
+ {
+ "epoch": 1.6904754462192493,
+ "grad_norm": 0.6891320943832397,
+ "learning_rate": 8.006063856024405e-05,
+ "loss": 1.3208,
+ "step": 1690
+ },
+ {
+ "epoch": 1.691475727548373,
+ "grad_norm": 0.6820452213287354,
+ "learning_rate": 7.995792988634152e-05,
+ "loss": 1.25,
+ "step": 1691
+ },
+ {
+ "epoch": 1.6924760088774968,
+ "grad_norm": 0.6455455422401428,
+ "learning_rate": 7.985524323504948e-05,
+ "loss": 1.4092,
+ "step": 1692
+ },
+ {
+ "epoch": 1.6934762902066205,
+ "grad_norm": 0.5509824156761169,
+ "learning_rate": 7.975257871920195e-05,
+ "loss": 1.2324,
+ "step": 1693
+ },
+ {
+ "epoch": 1.6944765715357444,
+ "grad_norm": 0.665798544883728,
+ "learning_rate": 7.964993645160866e-05,
+ "loss": 1.318,
+ "step": 1694
+ },
+ {
+ "epoch": 1.6954768528648683,
+ "grad_norm": 0.5200991034507751,
+ "learning_rate": 7.954731654505491e-05,
+ "loss": 1.1431,
+ "step": 1695
+ },
+ {
+ "epoch": 1.6964771341939922,
+ "grad_norm": 0.7394373416900635,
+ "learning_rate": 7.944471911230142e-05,
+ "loss": 1.5046,
+ "step": 1696
+ },
+ {
+ "epoch": 1.697477415523116,
+ "grad_norm": 0.7250887155532837,
+ "learning_rate": 7.93421442660842e-05,
+ "loss": 1.6707,
+ "step": 1697
+ },
+ {
+ "epoch": 1.6984776968522397,
+ "grad_norm": 0.6814633011817932,
+ "learning_rate": 7.923959211911449e-05,
+ "loss": 1.3375,
+ "step": 1698
+ },
+ {
+ "epoch": 1.6994779781813634,
+ "grad_norm": 0.8540093302726746,
+ "learning_rate": 7.91370627840785e-05,
+ "loss": 1.4843,
+ "step": 1699
+ },
+ {
+ "epoch": 1.7004782595104873,
+ "grad_norm": 0.596825122833252,
+ "learning_rate": 7.903455637363746e-05,
+ "loss": 1.2795,
+ "step": 1700
+ },
+ {
+ "epoch": 1.7014785408396111,
+ "grad_norm": 0.650389552116394,
+ "learning_rate": 7.89320730004274e-05,
+ "loss": 1.4164,
+ "step": 1701
+ },
+ {
+ "epoch": 1.702478822168735,
+ "grad_norm": 0.6001396179199219,
+ "learning_rate": 7.882961277705895e-05,
+ "loss": 1.3107,
+ "step": 1702
+ },
+ {
+ "epoch": 1.703479103497859,
+ "grad_norm": 0.6638504266738892,
+ "learning_rate": 7.872717581611741e-05,
+ "loss": 1.3454,
+ "step": 1703
+ },
+ {
+ "epoch": 1.7044793848269826,
+ "grad_norm": 0.6506242752075195,
+ "learning_rate": 7.862476223016246e-05,
+ "loss": 1.3095,
+ "step": 1704
+ },
+ {
+ "epoch": 1.7054796661561062,
+ "grad_norm": 0.6528734564781189,
+ "learning_rate": 7.852237213172812e-05,
+ "loss": 1.3354,
+ "step": 1705
+ },
+ {
+ "epoch": 1.7064799474852301,
+ "grad_norm": 0.6626534461975098,
+ "learning_rate": 7.842000563332254e-05,
+ "loss": 1.2478,
+ "step": 1706
+ },
+ {
+ "epoch": 1.707480228814354,
+ "grad_norm": 0.6849489212036133,
+ "learning_rate": 7.831766284742807e-05,
+ "loss": 1.4748,
+ "step": 1707
+ },
+ {
+ "epoch": 1.708480510143478,
+ "grad_norm": 0.6511324048042297,
+ "learning_rate": 7.82153438865009e-05,
+ "loss": 1.3912,
+ "step": 1708
+ },
+ {
+ "epoch": 1.7094807914726018,
+ "grad_norm": 0.610500156879425,
+ "learning_rate": 7.811304886297104e-05,
+ "loss": 1.5791,
+ "step": 1709
+ },
+ {
+ "epoch": 1.7104810728017255,
+ "grad_norm": 0.6765373945236206,
+ "learning_rate": 7.801077788924224e-05,
+ "loss": 1.3725,
+ "step": 1710
+ },
+ {
+ "epoch": 1.7114813541308493,
+ "grad_norm": 0.7581295371055603,
+ "learning_rate": 7.790853107769179e-05,
+ "loss": 1.4736,
+ "step": 1711
+ },
+ {
+ "epoch": 1.712481635459973,
+ "grad_norm": 0.7430140376091003,
+ "learning_rate": 7.780630854067045e-05,
+ "loss": 1.4055,
+ "step": 1712
+ },
+ {
+ "epoch": 1.713481916789097,
+ "grad_norm": 0.6995111107826233,
+ "learning_rate": 7.77041103905023e-05,
+ "loss": 1.4391,
+ "step": 1713
+ },
+ {
+ "epoch": 1.7144821981182208,
+ "grad_norm": 0.6253392100334167,
+ "learning_rate": 7.760193673948461e-05,
+ "loss": 1.3054,
+ "step": 1714
+ },
+ {
+ "epoch": 1.7154824794473447,
+ "grad_norm": 0.6570398211479187,
+ "learning_rate": 7.749978769988778e-05,
+ "loss": 1.328,
+ "step": 1715
+ },
+ {
+ "epoch": 1.7164827607764683,
+ "grad_norm": 0.6701228022575378,
+ "learning_rate": 7.739766338395511e-05,
+ "loss": 1.4463,
+ "step": 1716
+ },
+ {
+ "epoch": 1.7174830421055922,
+ "grad_norm": 0.6792857050895691,
+ "learning_rate": 7.729556390390275e-05,
+ "loss": 1.3868,
+ "step": 1717
+ },
+ {
+ "epoch": 1.7184833234347159,
+ "grad_norm": 0.6209396123886108,
+ "learning_rate": 7.719348937191957e-05,
+ "loss": 1.4349,
+ "step": 1718
+ },
+ {
+ "epoch": 1.7194836047638398,
+ "grad_norm": 0.6768544316291809,
+ "learning_rate": 7.709143990016702e-05,
+ "loss": 1.3351,
+ "step": 1719
+ },
+ {
+ "epoch": 1.7204838860929637,
+ "grad_norm": 0.6399651765823364,
+ "learning_rate": 7.698941560077899e-05,
+ "loss": 1.0941,
+ "step": 1720
+ },
+ {
+ "epoch": 1.7214841674220875,
+ "grad_norm": 0.727689802646637,
+ "learning_rate": 7.688741658586178e-05,
+ "loss": 1.6035,
+ "step": 1721
+ },
+ {
+ "epoch": 1.7224844487512114,
+ "grad_norm": 0.7155159711837769,
+ "learning_rate": 7.678544296749384e-05,
+ "loss": 1.3062,
+ "step": 1722
+ },
+ {
+ "epoch": 1.723484730080335,
+ "grad_norm": 0.7038417458534241,
+ "learning_rate": 7.668349485772572e-05,
+ "loss": 1.3564,
+ "step": 1723
+ },
+ {
+ "epoch": 1.7244850114094588,
+ "grad_norm": 0.6355061531066895,
+ "learning_rate": 7.658157236857999e-05,
+ "loss": 1.2889,
+ "step": 1724
+ },
+ {
+ "epoch": 1.7254852927385826,
+ "grad_norm": 0.6973921060562134,
+ "learning_rate": 7.6479675612051e-05,
+ "loss": 1.4768,
+ "step": 1725
+ },
+ {
+ "epoch": 1.7264855740677065,
+ "grad_norm": 0.6392905712127686,
+ "learning_rate": 7.637780470010487e-05,
+ "loss": 1.2648,
+ "step": 1726
+ },
+ {
+ "epoch": 1.7274858553968304,
+ "grad_norm": 0.6824831962585449,
+ "learning_rate": 7.62759597446793e-05,
+ "loss": 1.3659,
+ "step": 1727
+ },
+ {
+ "epoch": 1.7284861367259543,
+ "grad_norm": 0.6261188983917236,
+ "learning_rate": 7.617414085768351e-05,
+ "loss": 1.1169,
+ "step": 1728
+ },
+ {
+ "epoch": 1.729486418055078,
+ "grad_norm": 0.749819815158844,
+ "learning_rate": 7.607234815099802e-05,
+ "loss": 1.5689,
+ "step": 1729
+ },
+ {
+ "epoch": 1.7304866993842019,
+ "grad_norm": 0.6438270211219788,
+ "learning_rate": 7.597058173647458e-05,
+ "loss": 1.5213,
+ "step": 1730
+ },
+ {
+ "epoch": 1.7314869807133255,
+ "grad_norm": 0.6761091351509094,
+ "learning_rate": 7.586884172593609e-05,
+ "loss": 1.4573,
+ "step": 1731
+ },
+ {
+ "epoch": 1.7324872620424494,
+ "grad_norm": 0.7616665363311768,
+ "learning_rate": 7.576712823117645e-05,
+ "loss": 1.362,
+ "step": 1732
+ },
+ {
+ "epoch": 1.7334875433715733,
+ "grad_norm": 0.6882945895195007,
+ "learning_rate": 7.566544136396037e-05,
+ "loss": 1.3095,
+ "step": 1733
+ },
+ {
+ "epoch": 1.7344878247006972,
+ "grad_norm": 0.6496737003326416,
+ "learning_rate": 7.556378123602334e-05,
+ "loss": 1.7484,
+ "step": 1734
+ },
+ {
+ "epoch": 1.7354881060298208,
+ "grad_norm": 0.5761104822158813,
+ "learning_rate": 7.54621479590714e-05,
+ "loss": 1.1494,
+ "step": 1735
+ },
+ {
+ "epoch": 1.7364883873589447,
+ "grad_norm": 0.5761191248893738,
+ "learning_rate": 7.536054164478123e-05,
+ "loss": 1.235,
+ "step": 1736
+ },
+ {
+ "epoch": 1.7374886686880684,
+ "grad_norm": 0.6114856600761414,
+ "learning_rate": 7.525896240479976e-05,
+ "loss": 1.2359,
+ "step": 1737
+ },
+ {
+ "epoch": 1.7384889500171923,
+ "grad_norm": 0.6941234469413757,
+ "learning_rate": 7.51574103507442e-05,
+ "loss": 1.2464,
+ "step": 1738
+ },
+ {
+ "epoch": 1.7394892313463162,
+ "grad_norm": 0.8174465298652649,
+ "learning_rate": 7.505588559420189e-05,
+ "loss": 1.4083,
+ "step": 1739
+ },
+ {
+ "epoch": 1.74048951267544,
+ "grad_norm": 0.6795939207077026,
+ "learning_rate": 7.495438824673016e-05,
+ "loss": 1.1648,
+ "step": 1740
+ },
+ {
+ "epoch": 1.741489794004564,
+ "grad_norm": 0.7111831307411194,
+ "learning_rate": 7.485291841985626e-05,
+ "loss": 1.4835,
+ "step": 1741
+ },
+ {
+ "epoch": 1.7424900753336876,
+ "grad_norm": 0.6238039135932922,
+ "learning_rate": 7.475147622507717e-05,
+ "loss": 1.1049,
+ "step": 1742
+ },
+ {
+ "epoch": 1.7434903566628113,
+ "grad_norm": 0.6977236270904541,
+ "learning_rate": 7.465006177385953e-05,
+ "loss": 1.2669,
+ "step": 1743
+ },
+ {
+ "epoch": 1.7444906379919352,
+ "grad_norm": 0.5952944159507751,
+ "learning_rate": 7.454867517763948e-05,
+ "loss": 1.3034,
+ "step": 1744
+ },
+ {
+ "epoch": 1.745490919321059,
+ "grad_norm": 0.6380055546760559,
+ "learning_rate": 7.444731654782253e-05,
+ "loss": 1.348,
+ "step": 1745
+ },
+ {
+ "epoch": 1.746491200650183,
+ "grad_norm": 0.7760757207870483,
+ "learning_rate": 7.434598599578351e-05,
+ "loss": 1.4662,
+ "step": 1746
+ },
+ {
+ "epoch": 1.7474914819793068,
+ "grad_norm": 0.6411606073379517,
+ "learning_rate": 7.424468363286634e-05,
+ "loss": 1.3775,
+ "step": 1747
+ },
+ {
+ "epoch": 1.7484917633084305,
+ "grad_norm": 0.6461936235427856,
+ "learning_rate": 7.414340957038406e-05,
+ "loss": 1.53,
+ "step": 1748
+ },
+ {
+ "epoch": 1.7494920446375544,
+ "grad_norm": 0.6881558299064636,
+ "learning_rate": 7.404216391961847e-05,
+ "loss": 1.2593,
+ "step": 1749
+ },
+ {
+ "epoch": 1.750492325966678,
+ "grad_norm": 0.6212877631187439,
+ "learning_rate": 7.394094679182024e-05,
+ "loss": 1.3894,
+ "step": 1750
+ },
+ {
+ "epoch": 1.751492607295802,
+ "grad_norm": 0.6928493976593018,
+ "learning_rate": 7.383975829820874e-05,
+ "loss": 1.4044,
+ "step": 1751
+ },
+ {
+ "epoch": 1.7524928886249258,
+ "grad_norm": 0.8617000579833984,
+ "learning_rate": 7.37385985499718e-05,
+ "loss": 1.6135,
+ "step": 1752
+ },
+ {
+ "epoch": 1.7534931699540497,
+ "grad_norm": 0.5414397716522217,
+ "learning_rate": 7.36374676582657e-05,
+ "loss": 1.2033,
+ "step": 1753
+ },
+ {
+ "epoch": 1.7544934512831734,
+ "grad_norm": 0.6777795553207397,
+ "learning_rate": 7.353636573421496e-05,
+ "loss": 1.2382,
+ "step": 1754
+ },
+ {
+ "epoch": 1.7554937326122972,
+ "grad_norm": 0.6595777273178101,
+ "learning_rate": 7.343529288891239e-05,
+ "loss": 1.3224,
+ "step": 1755
+ },
+ {
+ "epoch": 1.756494013941421,
+ "grad_norm": 0.6468746066093445,
+ "learning_rate": 7.333424923341868e-05,
+ "loss": 1.3266,
+ "step": 1756
+ },
+ {
+ "epoch": 1.7574942952705448,
+ "grad_norm": 0.6333887577056885,
+ "learning_rate": 7.323323487876257e-05,
+ "loss": 1.2204,
+ "step": 1757
+ },
+ {
+ "epoch": 1.7584945765996687,
+ "grad_norm": 0.6916175484657288,
+ "learning_rate": 7.313224993594057e-05,
+ "loss": 1.2936,
+ "step": 1758
+ },
+ {
+ "epoch": 1.7594948579287926,
+ "grad_norm": 0.6844228506088257,
+ "learning_rate": 7.303129451591686e-05,
+ "loss": 1.3675,
+ "step": 1759
+ },
+ {
+ "epoch": 1.7604951392579165,
+ "grad_norm": 0.774632453918457,
+ "learning_rate": 7.29303687296232e-05,
+ "loss": 1.4389,
+ "step": 1760
+ },
+ {
+ "epoch": 1.7614954205870401,
+ "grad_norm": 0.648438572883606,
+ "learning_rate": 7.282947268795877e-05,
+ "loss": 1.3824,
+ "step": 1761
+ },
+ {
+ "epoch": 1.7624957019161638,
+ "grad_norm": 0.6045622229576111,
+ "learning_rate": 7.272860650179006e-05,
+ "loss": 1.5047,
+ "step": 1762
+ },
+ {
+ "epoch": 1.7634959832452877,
+ "grad_norm": 0.5664568543434143,
+ "learning_rate": 7.262777028195081e-05,
+ "loss": 1.2122,
+ "step": 1763
+ },
+ {
+ "epoch": 1.7644962645744116,
+ "grad_norm": 0.6108324527740479,
+ "learning_rate": 7.252696413924174e-05,
+ "loss": 1.1857,
+ "step": 1764
+ },
+ {
+ "epoch": 1.7654965459035354,
+ "grad_norm": 0.6443206667900085,
+ "learning_rate": 7.242618818443056e-05,
+ "loss": 1.3842,
+ "step": 1765
+ },
+ {
+ "epoch": 1.7664968272326593,
+ "grad_norm": 0.7583184242248535,
+ "learning_rate": 7.232544252825189e-05,
+ "loss": 1.2639,
+ "step": 1766
+ },
+ {
+ "epoch": 1.767497108561783,
+ "grad_norm": 1.4318912029266357,
+ "learning_rate": 7.222472728140695e-05,
+ "loss": 1.0818,
+ "step": 1767
+ },
+ {
+ "epoch": 1.7684973898909067,
+ "grad_norm": 0.7584578394889832,
+ "learning_rate": 7.212404255456357e-05,
+ "loss": 1.3726,
+ "step": 1768
+ },
+ {
+ "epoch": 1.7694976712200305,
+ "grad_norm": 0.6667199730873108,
+ "learning_rate": 7.202338845835606e-05,
+ "loss": 1.4032,
+ "step": 1769
+ },
+ {
+ "epoch": 1.7704979525491544,
+ "grad_norm": 0.6628596782684326,
+ "learning_rate": 7.192276510338507e-05,
+ "loss": 1.3554,
+ "step": 1770
+ },
+ {
+ "epoch": 1.7714982338782783,
+ "grad_norm": 0.6599582433700562,
+ "learning_rate": 7.182217260021749e-05,
+ "loss": 1.4631,
+ "step": 1771
+ },
+ {
+ "epoch": 1.7724985152074022,
+ "grad_norm": 0.6616823077201843,
+ "learning_rate": 7.172161105938624e-05,
+ "loss": 1.2651,
+ "step": 1772
+ },
+ {
+ "epoch": 1.7734987965365259,
+ "grad_norm": 0.7346659898757935,
+ "learning_rate": 7.162108059139032e-05,
+ "loss": 1.2884,
+ "step": 1773
+ },
+ {
+ "epoch": 1.7744990778656498,
+ "grad_norm": 0.7243573069572449,
+ "learning_rate": 7.15205813066945e-05,
+ "loss": 1.5171,
+ "step": 1774
+ },
+ {
+ "epoch": 1.7754993591947734,
+ "grad_norm": 0.7012961506843567,
+ "learning_rate": 7.142011331572936e-05,
+ "loss": 1.3929,
+ "step": 1775
+ },
+ {
+ "epoch": 1.7764996405238973,
+ "grad_norm": 0.7035613059997559,
+ "learning_rate": 7.131967672889101e-05,
+ "loss": 1.3625,
+ "step": 1776
+ },
+ {
+ "epoch": 1.7774999218530212,
+ "grad_norm": 0.6865414977073669,
+ "learning_rate": 7.121927165654109e-05,
+ "loss": 1.34,
+ "step": 1777
+ },
+ {
+ "epoch": 1.778500203182145,
+ "grad_norm": 0.6221490502357483,
+ "learning_rate": 7.111889820900664e-05,
+ "loss": 1.4009,
+ "step": 1778
+ },
+ {
+ "epoch": 1.7795004845112687,
+ "grad_norm": 0.7319238185882568,
+ "learning_rate": 7.101855649657991e-05,
+ "loss": 1.4471,
+ "step": 1779
+ },
+ {
+ "epoch": 1.7805007658403926,
+ "grad_norm": 0.6840189695358276,
+ "learning_rate": 7.091824662951827e-05,
+ "loss": 1.256,
+ "step": 1780
+ },
+ {
+ "epoch": 1.7815010471695163,
+ "grad_norm": 0.6095514297485352,
+ "learning_rate": 7.08179687180442e-05,
+ "loss": 1.2354,
+ "step": 1781
+ },
+ {
+ "epoch": 1.7825013284986402,
+ "grad_norm": 0.6160232424736023,
+ "learning_rate": 7.071772287234497e-05,
+ "loss": 1.2264,
+ "step": 1782
+ },
+ {
+ "epoch": 1.783501609827764,
+ "grad_norm": 0.8078712224960327,
+ "learning_rate": 7.06175092025726e-05,
+ "loss": 1.5183,
+ "step": 1783
+ },
+ {
+ "epoch": 1.784501891156888,
+ "grad_norm": 0.7782847881317139,
+ "learning_rate": 7.051732781884378e-05,
+ "loss": 1.3455,
+ "step": 1784
+ },
+ {
+ "epoch": 1.7855021724860118,
+ "grad_norm": 0.6435388326644897,
+ "learning_rate": 7.041717883123977e-05,
+ "loss": 1.3738,
+ "step": 1785
+ },
+ {
+ "epoch": 1.7865024538151355,
+ "grad_norm": 0.6329066157341003,
+ "learning_rate": 7.031706234980617e-05,
+ "loss": 1.3574,
+ "step": 1786
+ },
+ {
+ "epoch": 1.7875027351442592,
+ "grad_norm": 0.7085026502609253,
+ "learning_rate": 7.021697848455291e-05,
+ "loss": 1.4758,
+ "step": 1787
+ },
+ {
+ "epoch": 1.788503016473383,
+ "grad_norm": 0.6369369029998779,
+ "learning_rate": 7.011692734545403e-05,
+ "loss": 1.4166,
+ "step": 1788
+ },
+ {
+ "epoch": 1.789503297802507,
+ "grad_norm": 0.5777859687805176,
+ "learning_rate": 7.001690904244767e-05,
+ "loss": 1.2348,
+ "step": 1789
+ },
+ {
+ "epoch": 1.7905035791316308,
+ "grad_norm": 0.6327416896820068,
+ "learning_rate": 6.991692368543584e-05,
+ "loss": 1.3229,
+ "step": 1790
+ },
+ {
+ "epoch": 1.7915038604607547,
+ "grad_norm": 0.6739888191223145,
+ "learning_rate": 6.981697138428434e-05,
+ "loss": 1.3607,
+ "step": 1791
+ },
+ {
+ "epoch": 1.7925041417898784,
+ "grad_norm": 0.6856040954589844,
+ "learning_rate": 6.971705224882271e-05,
+ "loss": 1.3359,
+ "step": 1792
+ },
+ {
+ "epoch": 1.7935044231190023,
+ "grad_norm": 0.6209323406219482,
+ "learning_rate": 6.9617166388844e-05,
+ "loss": 1.1944,
+ "step": 1793
+ },
+ {
+ "epoch": 1.794504704448126,
+ "grad_norm": 0.6018400192260742,
+ "learning_rate": 6.951731391410468e-05,
+ "loss": 1.2403,
+ "step": 1794
+ },
+ {
+ "epoch": 1.7955049857772498,
+ "grad_norm": 0.7714535593986511,
+ "learning_rate": 6.94174949343246e-05,
+ "loss": 1.6654,
+ "step": 1795
+ },
+ {
+ "epoch": 1.7965052671063737,
+ "grad_norm": 0.6726022362709045,
+ "learning_rate": 6.931770955918674e-05,
+ "loss": 1.5234,
+ "step": 1796
+ },
+ {
+ "epoch": 1.7975055484354976,
+ "grad_norm": 0.6442826390266418,
+ "learning_rate": 6.921795789833723e-05,
+ "loss": 1.4253,
+ "step": 1797
+ },
+ {
+ "epoch": 1.7985058297646213,
+ "grad_norm": 0.6518099904060364,
+ "learning_rate": 6.911824006138503e-05,
+ "loss": 1.2595,
+ "step": 1798
+ },
+ {
+ "epoch": 1.7995061110937451,
+ "grad_norm": 0.6912478804588318,
+ "learning_rate": 6.901855615790206e-05,
+ "loss": 1.2241,
+ "step": 1799
+ },
+ {
+ "epoch": 1.8005063924228688,
+ "grad_norm": 0.7709338068962097,
+ "learning_rate": 6.891890629742288e-05,
+ "loss": 1.492,
+ "step": 1800
+ },
+ {
+ "epoch": 1.8015066737519927,
+ "grad_norm": 0.7332537770271301,
+ "learning_rate": 6.88192905894447e-05,
+ "loss": 1.2449,
+ "step": 1801
+ },
+ {
+ "epoch": 1.8025069550811166,
+ "grad_norm": 0.6881892681121826,
+ "learning_rate": 6.871970914342712e-05,
+ "loss": 1.5265,
+ "step": 1802
+ },
+ {
+ "epoch": 1.8035072364102405,
+ "grad_norm": 0.6630619764328003,
+ "learning_rate": 6.862016206879216e-05,
+ "loss": 1.4283,
+ "step": 1803
+ },
+ {
+ "epoch": 1.8045075177393644,
+ "grad_norm": 0.6601552367210388,
+ "learning_rate": 6.852064947492405e-05,
+ "loss": 1.296,
+ "step": 1804
+ },
+ {
+ "epoch": 1.805507799068488,
+ "grad_norm": 0.7896683216094971,
+ "learning_rate": 6.842117147116913e-05,
+ "loss": 1.3608,
+ "step": 1805
+ },
+ {
+ "epoch": 1.8065080803976117,
+ "grad_norm": 0.6206938624382019,
+ "learning_rate": 6.832172816683575e-05,
+ "loss": 1.2986,
+ "step": 1806
+ },
+ {
+ "epoch": 1.8075083617267356,
+ "grad_norm": 0.6818989515304565,
+ "learning_rate": 6.82223196711941e-05,
+ "loss": 1.3181,
+ "step": 1807
+ },
+ {
+ "epoch": 1.8085086430558595,
+ "grad_norm": 0.716175377368927,
+ "learning_rate": 6.812294609347615e-05,
+ "loss": 1.3468,
+ "step": 1808
+ },
+ {
+ "epoch": 1.8095089243849833,
+ "grad_norm": 0.6611238718032837,
+ "learning_rate": 6.802360754287547e-05,
+ "loss": 1.3609,
+ "step": 1809
+ },
+ {
+ "epoch": 1.8105092057141072,
+ "grad_norm": 0.637792706489563,
+ "learning_rate": 6.79243041285472e-05,
+ "loss": 1.3175,
+ "step": 1810
+ },
+ {
+ "epoch": 1.811509487043231,
+ "grad_norm": 0.6208909749984741,
+ "learning_rate": 6.782503595960782e-05,
+ "loss": 1.5203,
+ "step": 1811
+ },
+ {
+ "epoch": 1.8125097683723548,
+ "grad_norm": 0.8295003175735474,
+ "learning_rate": 6.772580314513508e-05,
+ "loss": 1.4409,
+ "step": 1812
+ },
+ {
+ "epoch": 1.8135100497014784,
+ "grad_norm": 0.6259089708328247,
+ "learning_rate": 6.762660579416791e-05,
+ "loss": 1.5972,
+ "step": 1813
+ },
+ {
+ "epoch": 1.8145103310306023,
+ "grad_norm": 0.6317711472511292,
+ "learning_rate": 6.752744401570625e-05,
+ "loss": 1.4977,
+ "step": 1814
+ },
+ {
+ "epoch": 1.8155106123597262,
+ "grad_norm": 0.6420071721076965,
+ "learning_rate": 6.742831791871096e-05,
+ "loss": 1.2393,
+ "step": 1815
+ },
+ {
+ "epoch": 1.81651089368885,
+ "grad_norm": 1.0690205097198486,
+ "learning_rate": 6.732922761210369e-05,
+ "loss": 1.1627,
+ "step": 1816
+ },
+ {
+ "epoch": 1.8175111750179738,
+ "grad_norm": 0.817659854888916,
+ "learning_rate": 6.723017320476679e-05,
+ "loss": 1.2888,
+ "step": 1817
+ },
+ {
+ "epoch": 1.8185114563470977,
+ "grad_norm": 0.5951409339904785,
+ "learning_rate": 6.713115480554313e-05,
+ "loss": 1.4495,
+ "step": 1818
+ },
+ {
+ "epoch": 1.8195117376762213,
+ "grad_norm": 0.6608357429504395,
+ "learning_rate": 6.7032172523236e-05,
+ "loss": 1.5617,
+ "step": 1819
+ },
+ {
+ "epoch": 1.8205120190053452,
+ "grad_norm": 0.5993407368659973,
+ "learning_rate": 6.693322646660906e-05,
+ "loss": 1.4812,
+ "step": 1820
+ },
+ {
+ "epoch": 1.821512300334469,
+ "grad_norm": 0.6179289817810059,
+ "learning_rate": 6.683431674438612e-05,
+ "loss": 1.2661,
+ "step": 1821
+ },
+ {
+ "epoch": 1.822512581663593,
+ "grad_norm": 0.6814618110656738,
+ "learning_rate": 6.673544346525107e-05,
+ "loss": 1.5599,
+ "step": 1822
+ },
+ {
+ "epoch": 1.8235128629927169,
+ "grad_norm": 0.7015717029571533,
+ "learning_rate": 6.663660673784777e-05,
+ "loss": 1.5481,
+ "step": 1823
+ },
+ {
+ "epoch": 1.8245131443218405,
+ "grad_norm": 0.6974764466285706,
+ "learning_rate": 6.653780667077985e-05,
+ "loss": 1.3893,
+ "step": 1824
+ },
+ {
+ "epoch": 1.8255134256509642,
+ "grad_norm": 0.614414393901825,
+ "learning_rate": 6.643904337261082e-05,
+ "loss": 1.4293,
+ "step": 1825
+ },
+ {
+ "epoch": 1.826513706980088,
+ "grad_norm": 0.6676987409591675,
+ "learning_rate": 6.634031695186362e-05,
+ "loss": 1.2622,
+ "step": 1826
+ },
+ {
+ "epoch": 1.827513988309212,
+ "grad_norm": 0.689335823059082,
+ "learning_rate": 6.624162751702076e-05,
+ "loss": 1.2908,
+ "step": 1827
+ },
+ {
+ "epoch": 1.8285142696383359,
+ "grad_norm": 0.6336010098457336,
+ "learning_rate": 6.614297517652409e-05,
+ "loss": 1.434,
+ "step": 1828
+ },
+ {
+ "epoch": 1.8295145509674597,
+ "grad_norm": 0.549472987651825,
+ "learning_rate": 6.604436003877464e-05,
+ "loss": 1.2401,
+ "step": 1829
+ },
+ {
+ "epoch": 1.8305148322965834,
+ "grad_norm": 0.6261748671531677,
+ "learning_rate": 6.594578221213265e-05,
+ "loss": 1.4202,
+ "step": 1830
+ },
+ {
+ "epoch": 1.831515113625707,
+ "grad_norm": 0.6431935429573059,
+ "learning_rate": 6.58472418049173e-05,
+ "loss": 1.4625,
+ "step": 1831
+ },
+ {
+ "epoch": 1.832515394954831,
+ "grad_norm": 0.726426362991333,
+ "learning_rate": 6.574873892540671e-05,
+ "loss": 1.4571,
+ "step": 1832
+ },
+ {
+ "epoch": 1.8335156762839548,
+ "grad_norm": 0.6399835348129272,
+ "learning_rate": 6.565027368183769e-05,
+ "loss": 1.5148,
+ "step": 1833
+ },
+ {
+ "epoch": 1.8345159576130787,
+ "grad_norm": 0.6948434114456177,
+ "learning_rate": 6.555184618240577e-05,
+ "loss": 1.3199,
+ "step": 1834
+ },
+ {
+ "epoch": 1.8355162389422026,
+ "grad_norm": 0.8539021611213684,
+ "learning_rate": 6.545345653526495e-05,
+ "loss": 1.4847,
+ "step": 1835
+ },
+ {
+ "epoch": 1.8365165202713263,
+ "grad_norm": 0.6419363617897034,
+ "learning_rate": 6.535510484852767e-05,
+ "loss": 1.4148,
+ "step": 1836
+ },
+ {
+ "epoch": 1.8375168016004502,
+ "grad_norm": 0.6716374158859253,
+ "learning_rate": 6.525679123026463e-05,
+ "loss": 1.3921,
+ "step": 1837
+ },
+ {
+ "epoch": 1.8385170829295738,
+ "grad_norm": 0.6869264841079712,
+ "learning_rate": 6.515851578850474e-05,
+ "loss": 1.5427,
+ "step": 1838
+ },
+ {
+ "epoch": 1.8395173642586977,
+ "grad_norm": 0.6462099552154541,
+ "learning_rate": 6.506027863123492e-05,
+ "loss": 1.44,
+ "step": 1839
+ },
+ {
+ "epoch": 1.8405176455878216,
+ "grad_norm": 0.7004585266113281,
+ "learning_rate": 6.496207986640004e-05,
+ "loss": 1.352,
+ "step": 1840
+ },
+ {
+ "epoch": 1.8415179269169455,
+ "grad_norm": 0.6404154896736145,
+ "learning_rate": 6.48639196019028e-05,
+ "loss": 1.309,
+ "step": 1841
+ },
+ {
+ "epoch": 1.8425182082460692,
+ "grad_norm": 0.6515551209449768,
+ "learning_rate": 6.476579794560356e-05,
+ "loss": 1.1967,
+ "step": 1842
+ },
+ {
+ "epoch": 1.843518489575193,
+ "grad_norm": 0.6698777675628662,
+ "learning_rate": 6.46677150053203e-05,
+ "loss": 1.4054,
+ "step": 1843
+ },
+ {
+ "epoch": 1.8445187709043167,
+ "grad_norm": 0.5269995927810669,
+ "learning_rate": 6.45696708888284e-05,
+ "loss": 1.3496,
+ "step": 1844
+ },
+ {
+ "epoch": 1.8455190522334406,
+ "grad_norm": 0.7120770215988159,
+ "learning_rate": 6.447166570386063e-05,
+ "loss": 1.449,
+ "step": 1845
+ },
+ {
+ "epoch": 1.8465193335625645,
+ "grad_norm": 0.6120896935462952,
+ "learning_rate": 6.437369955810699e-05,
+ "loss": 1.3471,
+ "step": 1846
+ },
+ {
+ "epoch": 1.8475196148916884,
+ "grad_norm": 0.7216696739196777,
+ "learning_rate": 6.42757725592145e-05,
+ "loss": 1.681,
+ "step": 1847
+ },
+ {
+ "epoch": 1.8485198962208123,
+ "grad_norm": 0.7460939288139343,
+ "learning_rate": 6.417788481478728e-05,
+ "loss": 1.3102,
+ "step": 1848
+ },
+ {
+ "epoch": 1.849520177549936,
+ "grad_norm": 0.6691422462463379,
+ "learning_rate": 6.40800364323862e-05,
+ "loss": 1.2691,
+ "step": 1849
+ },
+ {
+ "epoch": 1.8505204588790596,
+ "grad_norm": 0.5813978314399719,
+ "learning_rate": 6.398222751952899e-05,
+ "loss": 1.3182,
+ "step": 1850
+ },
+ {
+ "epoch": 1.8515207402081835,
+ "grad_norm": 0.7524621486663818,
+ "learning_rate": 6.388445818368991e-05,
+ "loss": 1.2682,
+ "step": 1851
+ },
+ {
+ "epoch": 1.8525210215373074,
+ "grad_norm": 0.6798551678657532,
+ "learning_rate": 6.378672853229981e-05,
+ "loss": 1.3795,
+ "step": 1852
+ },
+ {
+ "epoch": 1.8535213028664312,
+ "grad_norm": 0.6338953971862793,
+ "learning_rate": 6.368903867274585e-05,
+ "loss": 1.3801,
+ "step": 1853
+ },
+ {
+ "epoch": 1.8545215841955551,
+ "grad_norm": 0.6461024880409241,
+ "learning_rate": 6.35913887123716e-05,
+ "loss": 1.3359,
+ "step": 1854
+ },
+ {
+ "epoch": 1.8555218655246788,
+ "grad_norm": 0.7232131958007812,
+ "learning_rate": 6.34937787584767e-05,
+ "loss": 1.4629,
+ "step": 1855
+ },
+ {
+ "epoch": 1.8565221468538027,
+ "grad_norm": 0.6489596366882324,
+ "learning_rate": 6.339620891831678e-05,
+ "loss": 1.2078,
+ "step": 1856
+ },
+ {
+ "epoch": 1.8575224281829263,
+ "grad_norm": 0.5821114778518677,
+ "learning_rate": 6.329867929910347e-05,
+ "loss": 1.5002,
+ "step": 1857
+ },
+ {
+ "epoch": 1.8585227095120502,
+ "grad_norm": 0.5860056281089783,
+ "learning_rate": 6.32011900080042e-05,
+ "loss": 1.1737,
+ "step": 1858
+ },
+ {
+ "epoch": 1.8595229908411741,
+ "grad_norm": 0.5989000797271729,
+ "learning_rate": 6.310374115214204e-05,
+ "loss": 1.3945,
+ "step": 1859
+ },
+ {
+ "epoch": 1.860523272170298,
+ "grad_norm": 0.7010142803192139,
+ "learning_rate": 6.30063328385957e-05,
+ "loss": 1.4431,
+ "step": 1860
+ },
+ {
+ "epoch": 1.8615235534994217,
+ "grad_norm": 0.5994375348091125,
+ "learning_rate": 6.290896517439925e-05,
+ "loss": 1.5378,
+ "step": 1861
+ },
+ {
+ "epoch": 1.8625238348285456,
+ "grad_norm": 0.7813047170639038,
+ "learning_rate": 6.281163826654218e-05,
+ "loss": 1.4424,
+ "step": 1862
+ },
+ {
+ "epoch": 1.8635241161576692,
+ "grad_norm": 0.8812029361724854,
+ "learning_rate": 6.271435222196916e-05,
+ "loss": 1.4378,
+ "step": 1863
+ },
+ {
+ "epoch": 1.864524397486793,
+ "grad_norm": 0.7037883400917053,
+ "learning_rate": 6.261710714757994e-05,
+ "loss": 1.5074,
+ "step": 1864
+ },
+ {
+ "epoch": 1.865524678815917,
+ "grad_norm": 0.6375555396080017,
+ "learning_rate": 6.251990315022927e-05,
+ "loss": 1.5777,
+ "step": 1865
+ },
+ {
+ "epoch": 1.8665249601450409,
+ "grad_norm": 0.7071056365966797,
+ "learning_rate": 6.24227403367268e-05,
+ "loss": 1.5755,
+ "step": 1866
+ },
+ {
+ "epoch": 1.8675252414741648,
+ "grad_norm": 0.69529789686203,
+ "learning_rate": 6.232561881383687e-05,
+ "loss": 1.5843,
+ "step": 1867
+ },
+ {
+ "epoch": 1.8685255228032884,
+ "grad_norm": 0.7598209977149963,
+ "learning_rate": 6.222853868827839e-05,
+ "loss": 1.3657,
+ "step": 1868
+ },
+ {
+ "epoch": 1.869525804132412,
+ "grad_norm": 0.6517311930656433,
+ "learning_rate": 6.213150006672499e-05,
+ "loss": 1.5294,
+ "step": 1869
+ },
+ {
+ "epoch": 1.870526085461536,
+ "grad_norm": 0.5940656065940857,
+ "learning_rate": 6.20345030558045e-05,
+ "loss": 1.4061,
+ "step": 1870
+ },
+ {
+ "epoch": 1.8715263667906599,
+ "grad_norm": 0.6668642163276672,
+ "learning_rate": 6.193754776209911e-05,
+ "loss": 1.339,
+ "step": 1871
+ },
+ {
+ "epoch": 1.8725266481197838,
+ "grad_norm": 0.6001901626586914,
+ "learning_rate": 6.184063429214515e-05,
+ "loss": 1.2836,
+ "step": 1872
+ },
+ {
+ "epoch": 1.8735269294489076,
+ "grad_norm": 0.703733503818512,
+ "learning_rate": 6.174376275243299e-05,
+ "loss": 1.3441,
+ "step": 1873
+ },
+ {
+ "epoch": 1.8745272107780313,
+ "grad_norm": 0.5734414458274841,
+ "learning_rate": 6.164693324940694e-05,
+ "loss": 1.09,
+ "step": 1874
+ },
+ {
+ "epoch": 1.8755274921071552,
+ "grad_norm": 0.6441298127174377,
+ "learning_rate": 6.15501458894651e-05,
+ "loss": 1.2849,
+ "step": 1875
+ },
+ {
+ "epoch": 1.8765277734362789,
+ "grad_norm": 0.6523350477218628,
+ "learning_rate": 6.145340077895929e-05,
+ "loss": 1.4681,
+ "step": 1876
+ },
+ {
+ "epoch": 1.8775280547654027,
+ "grad_norm": 0.6061530113220215,
+ "learning_rate": 6.135669802419488e-05,
+ "loss": 1.3961,
+ "step": 1877
+ },
+ {
+ "epoch": 1.8785283360945266,
+ "grad_norm": 0.7068478465080261,
+ "learning_rate": 6.126003773143072e-05,
+ "loss": 1.2469,
+ "step": 1878
+ },
+ {
+ "epoch": 1.8795286174236505,
+ "grad_norm": 0.6139722466468811,
+ "learning_rate": 6.116342000687896e-05,
+ "loss": 1.4774,
+ "step": 1879
+ },
+ {
+ "epoch": 1.8805288987527742,
+ "grad_norm": 0.7666826248168945,
+ "learning_rate": 6.106684495670506e-05,
+ "loss": 1.4634,
+ "step": 1880
+ },
+ {
+ "epoch": 1.881529180081898,
+ "grad_norm": 0.5872985124588013,
+ "learning_rate": 6.097031268702746e-05,
+ "loss": 1.2302,
+ "step": 1881
+ },
+ {
+ "epoch": 1.8825294614110217,
+ "grad_norm": 0.6170175075531006,
+ "learning_rate": 6.087382330391774e-05,
+ "loss": 1.0561,
+ "step": 1882
+ },
+ {
+ "epoch": 1.8835297427401456,
+ "grad_norm": 0.7397921085357666,
+ "learning_rate": 6.077737691340023e-05,
+ "loss": 1.3574,
+ "step": 1883
+ },
+ {
+ "epoch": 1.8845300240692695,
+ "grad_norm": 0.5703612565994263,
+ "learning_rate": 6.0680973621452105e-05,
+ "loss": 1.2651,
+ "step": 1884
+ },
+ {
+ "epoch": 1.8855303053983934,
+ "grad_norm": 0.6688309907913208,
+ "learning_rate": 6.0584613534003144e-05,
+ "loss": 1.5202,
+ "step": 1885
+ },
+ {
+ "epoch": 1.8865305867275173,
+ "grad_norm": 0.6952941417694092,
+ "learning_rate": 6.0488296756935636e-05,
+ "loss": 1.3407,
+ "step": 1886
+ },
+ {
+ "epoch": 1.887530868056641,
+ "grad_norm": 0.6440621018409729,
+ "learning_rate": 6.039202339608432e-05,
+ "loss": 1.3836,
+ "step": 1887
+ },
+ {
+ "epoch": 1.8885311493857646,
+ "grad_norm": 0.7453868985176086,
+ "learning_rate": 6.0295793557236203e-05,
+ "loss": 1.3744,
+ "step": 1888
+ },
+ {
+ "epoch": 1.8895314307148885,
+ "grad_norm": 0.5936272740364075,
+ "learning_rate": 6.019960734613047e-05,
+ "loss": 1.2957,
+ "step": 1889
+ },
+ {
+ "epoch": 1.8905317120440124,
+ "grad_norm": 0.6652967929840088,
+ "learning_rate": 6.010346486845837e-05,
+ "loss": 1.3191,
+ "step": 1890
+ },
+ {
+ "epoch": 1.8915319933731363,
+ "grad_norm": 0.5736771821975708,
+ "learning_rate": 6.0007366229863117e-05,
+ "loss": 1.2785,
+ "step": 1891
+ },
+ {
+ "epoch": 1.8925322747022602,
+ "grad_norm": 0.6693833470344543,
+ "learning_rate": 5.991131153593971e-05,
+ "loss": 1.2598,
+ "step": 1892
+ },
+ {
+ "epoch": 1.8935325560313838,
+ "grad_norm": 0.6824096441268921,
+ "learning_rate": 5.981530089223489e-05,
+ "loss": 1.6205,
+ "step": 1893
+ },
+ {
+ "epoch": 1.8945328373605075,
+ "grad_norm": 0.6346132159233093,
+ "learning_rate": 5.971933440424703e-05,
+ "loss": 1.2442,
+ "step": 1894
+ },
+ {
+ "epoch": 1.8955331186896314,
+ "grad_norm": 0.6125045418739319,
+ "learning_rate": 5.9623412177425886e-05,
+ "loss": 1.3326,
+ "step": 1895
+ },
+ {
+ "epoch": 1.8965334000187553,
+ "grad_norm": 0.5784642696380615,
+ "learning_rate": 5.952753431717268e-05,
+ "loss": 1.2986,
+ "step": 1896
+ },
+ {
+ "epoch": 1.8975336813478791,
+ "grad_norm": 0.6546468138694763,
+ "learning_rate": 5.9431700928839805e-05,
+ "loss": 1.2985,
+ "step": 1897
+ },
+ {
+ "epoch": 1.898533962677003,
+ "grad_norm": 0.6771805286407471,
+ "learning_rate": 5.933591211773082e-05,
+ "loss": 1.3101,
+ "step": 1898
+ },
+ {
+ "epoch": 1.8995342440061267,
+ "grad_norm": 0.672447681427002,
+ "learning_rate": 5.924016798910037e-05,
+ "loss": 1.3677,
+ "step": 1899
+ },
+ {
+ "epoch": 1.9005345253352506,
+ "grad_norm": 0.6948104500770569,
+ "learning_rate": 5.914446864815388e-05,
+ "loss": 1.2893,
+ "step": 1900
+ },
+ {
+ "epoch": 1.9015348066643742,
+ "grad_norm": 0.6217272877693176,
+ "learning_rate": 5.9048814200047675e-05,
+ "loss": 1.3604,
+ "step": 1901
+ },
+ {
+ "epoch": 1.9025350879934981,
+ "grad_norm": 0.7615220546722412,
+ "learning_rate": 5.895320474988864e-05,
+ "loss": 1.4518,
+ "step": 1902
+ },
+ {
+ "epoch": 1.903535369322622,
+ "grad_norm": 0.6633756160736084,
+ "learning_rate": 5.885764040273426e-05,
+ "loss": 1.4675,
+ "step": 1903
+ },
+ {
+ "epoch": 1.904535650651746,
+ "grad_norm": 0.877419114112854,
+ "learning_rate": 5.876212126359251e-05,
+ "loss": 1.2659,
+ "step": 1904
+ },
+ {
+ "epoch": 1.9055359319808696,
+ "grad_norm": 0.6730911731719971,
+ "learning_rate": 5.866664743742162e-05,
+ "loss": 1.4772,
+ "step": 1905
+ },
+ {
+ "epoch": 1.9065362133099935,
+ "grad_norm": 0.6866170763969421,
+ "learning_rate": 5.857121902913008e-05,
+ "loss": 1.503,
+ "step": 1906
+ },
+ {
+ "epoch": 1.9075364946391171,
+ "grad_norm": 0.6140307784080505,
+ "learning_rate": 5.8475836143576433e-05,
+ "loss": 1.3118,
+ "step": 1907
+ },
+ {
+ "epoch": 1.908536775968241,
+ "grad_norm": 0.6074461340904236,
+ "learning_rate": 5.838049888556925e-05,
+ "loss": 1.2815,
+ "step": 1908
+ },
+ {
+ "epoch": 1.909537057297365,
+ "grad_norm": 0.6943862438201904,
+ "learning_rate": 5.8285207359866936e-05,
+ "loss": 1.2693,
+ "step": 1909
+ },
+ {
+ "epoch": 1.9105373386264888,
+ "grad_norm": 0.7455725073814392,
+ "learning_rate": 5.8189961671177574e-05,
+ "loss": 1.6509,
+ "step": 1910
+ },
+ {
+ "epoch": 1.9115376199556127,
+ "grad_norm": 0.645656943321228,
+ "learning_rate": 5.809476192415905e-05,
+ "loss": 1.3285,
+ "step": 1911
+ },
+ {
+ "epoch": 1.9125379012847363,
+ "grad_norm": 0.6280725002288818,
+ "learning_rate": 5.7999608223418534e-05,
+ "loss": 1.1409,
+ "step": 1912
+ },
+ {
+ "epoch": 1.91353818261386,
+ "grad_norm": 0.6084076762199402,
+ "learning_rate": 5.790450067351291e-05,
+ "loss": 1.4,
+ "step": 1913
+ },
+ {
+ "epoch": 1.9145384639429839,
+ "grad_norm": 0.5778687596321106,
+ "learning_rate": 5.780943937894805e-05,
+ "loss": 1.4359,
+ "step": 1914
+ },
+ {
+ "epoch": 1.9155387452721078,
+ "grad_norm": 0.5729363560676575,
+ "learning_rate": 5.771442444417918e-05,
+ "loss": 1.1936,
+ "step": 1915
+ },
+ {
+ "epoch": 1.9165390266012317,
+ "grad_norm": 0.5981405973434448,
+ "learning_rate": 5.761945597361054e-05,
+ "loss": 1.4219,
+ "step": 1916
+ },
+ {
+ "epoch": 1.9175393079303555,
+ "grad_norm": 0.6797769069671631,
+ "learning_rate": 5.752453407159522e-05,
+ "loss": 1.2791,
+ "step": 1917
+ },
+ {
+ "epoch": 1.9185395892594792,
+ "grad_norm": 0.6143385171890259,
+ "learning_rate": 5.742965884243532e-05,
+ "loss": 1.1946,
+ "step": 1918
+ },
+ {
+ "epoch": 1.919539870588603,
+ "grad_norm": 0.7437219023704529,
+ "learning_rate": 5.733483039038149e-05,
+ "loss": 1.4242,
+ "step": 1919
+ },
+ {
+ "epoch": 1.9205401519177268,
+ "grad_norm": 0.6434136033058167,
+ "learning_rate": 5.724004881963311e-05,
+ "loss": 1.3105,
+ "step": 1920
+ },
+ {
+ "epoch": 1.9215404332468506,
+ "grad_norm": 0.6449226140975952,
+ "learning_rate": 5.714531423433791e-05,
+ "loss": 1.3863,
+ "step": 1921
+ },
+ {
+ "epoch": 1.9225407145759745,
+ "grad_norm": 0.7470558881759644,
+ "learning_rate": 5.705062673859216e-05,
+ "loss": 1.3502,
+ "step": 1922
+ },
+ {
+ "epoch": 1.9235409959050984,
+ "grad_norm": 0.6595833897590637,
+ "learning_rate": 5.69559864364402e-05,
+ "loss": 1.4515,
+ "step": 1923
+ },
+ {
+ "epoch": 1.924541277234222,
+ "grad_norm": 0.6996669769287109,
+ "learning_rate": 5.6861393431874675e-05,
+ "loss": 1.5347,
+ "step": 1924
+ },
+ {
+ "epoch": 1.925541558563346,
+ "grad_norm": 0.6640759706497192,
+ "learning_rate": 5.676684782883615e-05,
+ "loss": 1.322,
+ "step": 1925
+ },
+ {
+ "epoch": 1.9265418398924696,
+ "grad_norm": 0.6044187545776367,
+ "learning_rate": 5.667234973121317e-05,
+ "loss": 1.3457,
+ "step": 1926
+ },
+ {
+ "epoch": 1.9275421212215935,
+ "grad_norm": 0.6154062747955322,
+ "learning_rate": 5.6577899242842025e-05,
+ "loss": 1.4132,
+ "step": 1927
+ },
+ {
+ "epoch": 1.9285424025507174,
+ "grad_norm": 0.730377733707428,
+ "learning_rate": 5.648349646750673e-05,
+ "loss": 1.4286,
+ "step": 1928
+ },
+ {
+ "epoch": 1.9295426838798413,
+ "grad_norm": 0.6812162399291992,
+ "learning_rate": 5.6389141508938903e-05,
+ "loss": 1.4494,
+ "step": 1929
+ },
+ {
+ "epoch": 1.9305429652089652,
+ "grad_norm": 0.6427568197250366,
+ "learning_rate": 5.629483447081751e-05,
+ "loss": 1.4093,
+ "step": 1930
+ },
+ {
+ "epoch": 1.9315432465380888,
+ "grad_norm": 0.672756016254425,
+ "learning_rate": 5.620057545676901e-05,
+ "loss": 1.3817,
+ "step": 1931
+ },
+ {
+ "epoch": 1.9325435278672125,
+ "grad_norm": 0.6241796016693115,
+ "learning_rate": 5.610636457036693e-05,
+ "loss": 1.3074,
+ "step": 1932
+ },
+ {
+ "epoch": 1.9335438091963364,
+ "grad_norm": 0.5853481888771057,
+ "learning_rate": 5.601220191513208e-05,
+ "loss": 1.3297,
+ "step": 1933
+ },
+ {
+ "epoch": 1.9345440905254603,
+ "grad_norm": 0.5953206419944763,
+ "learning_rate": 5.591808759453214e-05,
+ "loss": 1.4803,
+ "step": 1934
+ },
+ {
+ "epoch": 1.9355443718545842,
+ "grad_norm": 0.6578570008277893,
+ "learning_rate": 5.5824021711981686e-05,
+ "loss": 1.1907,
+ "step": 1935
+ },
+ {
+ "epoch": 1.936544653183708,
+ "grad_norm": 0.7336297035217285,
+ "learning_rate": 5.573000437084221e-05,
+ "loss": 1.3186,
+ "step": 1936
+ },
+ {
+ "epoch": 1.9375449345128317,
+ "grad_norm": 0.594570517539978,
+ "learning_rate": 5.563603567442168e-05,
+ "loss": 1.499,
+ "step": 1937
+ },
+ {
+ "epoch": 1.9385452158419556,
+ "grad_norm": 0.6666537523269653,
+ "learning_rate": 5.554211572597477e-05,
+ "loss": 1.4292,
+ "step": 1938
+ },
+ {
+ "epoch": 1.9395454971710793,
+ "grad_norm": 0.6429474353790283,
+ "learning_rate": 5.544824462870244e-05,
+ "loss": 1.5317,
+ "step": 1939
+ },
+ {
+ "epoch": 1.9405457785002032,
+ "grad_norm": 0.6204141974449158,
+ "learning_rate": 5.5354422485752125e-05,
+ "loss": 1.3496,
+ "step": 1940
+ },
+ {
+ "epoch": 1.941546059829327,
+ "grad_norm": 0.6017738580703735,
+ "learning_rate": 5.5260649400217326e-05,
+ "loss": 1.2879,
+ "step": 1941
+ },
+ {
+ "epoch": 1.942546341158451,
+ "grad_norm": 0.7276145219802856,
+ "learning_rate": 5.5166925475137735e-05,
+ "loss": 1.4061,
+ "step": 1942
+ },
+ {
+ "epoch": 1.9435466224875746,
+ "grad_norm": 0.5995808243751526,
+ "learning_rate": 5.507325081349903e-05,
+ "loss": 1.3676,
+ "step": 1943
+ },
+ {
+ "epoch": 1.9445469038166985,
+ "grad_norm": 0.7158801555633545,
+ "learning_rate": 5.497962551823266e-05,
+ "loss": 1.4506,
+ "step": 1944
+ },
+ {
+ "epoch": 1.9455471851458221,
+ "grad_norm": 0.7192680239677429,
+ "learning_rate": 5.488604969221597e-05,
+ "loss": 1.276,
+ "step": 1945
+ },
+ {
+ "epoch": 1.946547466474946,
+ "grad_norm": 0.5900127291679382,
+ "learning_rate": 5.479252343827178e-05,
+ "loss": 1.2548,
+ "step": 1946
+ },
+ {
+ "epoch": 1.94754774780407,
+ "grad_norm": 0.8271359801292419,
+ "learning_rate": 5.469904685916861e-05,
+ "loss": 1.2133,
+ "step": 1947
+ },
+ {
+ "epoch": 1.9485480291331938,
+ "grad_norm": 0.6850492358207703,
+ "learning_rate": 5.460562005762024e-05,
+ "loss": 1.4255,
+ "step": 1948
+ },
+ {
+ "epoch": 1.9495483104623177,
+ "grad_norm": 0.6316859722137451,
+ "learning_rate": 5.4512243136285915e-05,
+ "loss": 1.2897,
+ "step": 1949
+ },
+ {
+ "epoch": 1.9505485917914414,
+ "grad_norm": 0.6135215759277344,
+ "learning_rate": 5.441891619776987e-05,
+ "loss": 1.3193,
+ "step": 1950
+ },
+ {
+ "epoch": 1.951548873120565,
+ "grad_norm": 0.7005714178085327,
+ "learning_rate": 5.432563934462166e-05,
+ "loss": 1.4412,
+ "step": 1951
+ },
+ {
+ "epoch": 1.952549154449689,
+ "grad_norm": 0.6692869067192078,
+ "learning_rate": 5.423241267933557e-05,
+ "loss": 1.2523,
+ "step": 1952
+ },
+ {
+ "epoch": 1.9535494357788128,
+ "grad_norm": 0.5568224191665649,
+ "learning_rate": 5.4139236304350935e-05,
+ "loss": 1.3503,
+ "step": 1953
+ },
+ {
+ "epoch": 1.9545497171079367,
+ "grad_norm": 0.6143470406532288,
+ "learning_rate": 5.404611032205169e-05,
+ "loss": 1.4381,
+ "step": 1954
+ },
+ {
+ "epoch": 1.9555499984370606,
+ "grad_norm": 0.6193466782569885,
+ "learning_rate": 5.3953034834766416e-05,
+ "loss": 1.4821,
+ "step": 1955
+ },
+ {
+ "epoch": 1.9565502797661842,
+ "grad_norm": 0.6553237438201904,
+ "learning_rate": 5.386000994476832e-05,
+ "loss": 1.3022,
+ "step": 1956
+ },
+ {
+ "epoch": 1.957550561095308,
+ "grad_norm": 0.7644321918487549,
+ "learning_rate": 5.376703575427481e-05,
+ "loss": 1.444,
+ "step": 1957
+ },
+ {
+ "epoch": 1.9585508424244318,
+ "grad_norm": 0.720217227935791,
+ "learning_rate": 5.367411236544786e-05,
+ "loss": 1.46,
+ "step": 1958
+ },
+ {
+ "epoch": 1.9595511237535557,
+ "grad_norm": 0.6305975317955017,
+ "learning_rate": 5.3581239880393375e-05,
+ "loss": 1.5285,
+ "step": 1959
+ },
+ {
+ "epoch": 1.9605514050826796,
+ "grad_norm": 0.5860128998756409,
+ "learning_rate": 5.3488418401161475e-05,
+ "loss": 1.254,
+ "step": 1960
+ },
+ {
+ "epoch": 1.9615516864118034,
+ "grad_norm": 0.6627411842346191,
+ "learning_rate": 5.339564802974615e-05,
+ "loss": 1.2639,
+ "step": 1961
+ },
+ {
+ "epoch": 1.962551967740927,
+ "grad_norm": 0.614637017250061,
+ "learning_rate": 5.33029288680852e-05,
+ "loss": 1.3714,
+ "step": 1962
+ },
+ {
+ "epoch": 1.963552249070051,
+ "grad_norm": 0.6468274593353271,
+ "learning_rate": 5.321026101806032e-05,
+ "loss": 1.3802,
+ "step": 1963
+ },
+ {
+ "epoch": 1.9645525303991747,
+ "grad_norm": 0.6303175091743469,
+ "learning_rate": 5.311764458149664e-05,
+ "loss": 1.3505,
+ "step": 1964
+ },
+ {
+ "epoch": 1.9655528117282985,
+ "grad_norm": 0.6070892214775085,
+ "learning_rate": 5.302507966016295e-05,
+ "loss": 1.4039,
+ "step": 1965
+ },
+ {
+ "epoch": 1.9665530930574224,
+ "grad_norm": 0.6614121198654175,
+ "learning_rate": 5.293256635577126e-05,
+ "loss": 1.324,
+ "step": 1966
+ },
+ {
+ "epoch": 1.9675533743865463,
+ "grad_norm": 0.6081134676933289,
+ "learning_rate": 5.284010476997705e-05,
+ "loss": 1.2839,
+ "step": 1967
+ },
+ {
+ "epoch": 1.96855365571567,
+ "grad_norm": 0.707693338394165,
+ "learning_rate": 5.274769500437882e-05,
+ "loss": 1.4179,
+ "step": 1968
+ },
+ {
+ "epoch": 1.9695539370447939,
+ "grad_norm": 0.7113189697265625,
+ "learning_rate": 5.265533716051825e-05,
+ "loss": 1.1911,
+ "step": 1969
+ },
+ {
+ "epoch": 1.9705542183739175,
+ "grad_norm": 0.7755022048950195,
+ "learning_rate": 5.256303133987982e-05,
+ "loss": 1.3266,
+ "step": 1970
+ },
+ {
+ "epoch": 1.9715544997030414,
+ "grad_norm": 0.6285157203674316,
+ "learning_rate": 5.247077764389099e-05,
+ "loss": 1.2468,
+ "step": 1971
+ },
+ {
+ "epoch": 1.9725547810321653,
+ "grad_norm": 0.6110594272613525,
+ "learning_rate": 5.2378576173921934e-05,
+ "loss": 1.2845,
+ "step": 1972
+ },
+ {
+ "epoch": 1.9735550623612892,
+ "grad_norm": 0.7382394671440125,
+ "learning_rate": 5.22864270312853e-05,
+ "loss": 1.4531,
+ "step": 1973
+ },
+ {
+ "epoch": 1.974555343690413,
+ "grad_norm": 3.4922046661376953,
+ "learning_rate": 5.219433031723641e-05,
+ "loss": 1.174,
+ "step": 1974
+ },
+ {
+ "epoch": 1.9755556250195367,
+ "grad_norm": 0.5983368158340454,
+ "learning_rate": 5.210228613297281e-05,
+ "loss": 1.2157,
+ "step": 1975
+ },
+ {
+ "epoch": 1.9765559063486604,
+ "grad_norm": 0.6913344264030457,
+ "learning_rate": 5.201029457963451e-05,
+ "loss": 1.3994,
+ "step": 1976
+ },
+ {
+ "epoch": 1.9775561876777843,
+ "grad_norm": 0.5928197503089905,
+ "learning_rate": 5.191835575830352e-05,
+ "loss": 1.5831,
+ "step": 1977
+ },
+ {
+ "epoch": 1.9785564690069082,
+ "grad_norm": 0.6904213428497314,
+ "learning_rate": 5.1826469770004026e-05,
+ "loss": 1.3408,
+ "step": 1978
+ },
+ {
+ "epoch": 1.979556750336032,
+ "grad_norm": 0.6584596633911133,
+ "learning_rate": 5.1734636715702043e-05,
+ "loss": 1.3828,
+ "step": 1979
+ },
+ {
+ "epoch": 1.980557031665156,
+ "grad_norm": 0.7366130352020264,
+ "learning_rate": 5.1642856696305575e-05,
+ "loss": 1.5174,
+ "step": 1980
+ },
+ {
+ "epoch": 1.9815573129942796,
+ "grad_norm": 0.6994180679321289,
+ "learning_rate": 5.155112981266422e-05,
+ "loss": 1.6449,
+ "step": 1981
+ },
+ {
+ "epoch": 1.9825575943234035,
+ "grad_norm": 0.685383677482605,
+ "learning_rate": 5.145945616556921e-05,
+ "loss": 1.2966,
+ "step": 1982
+ },
+ {
+ "epoch": 1.9835578756525272,
+ "grad_norm": 0.6237232685089111,
+ "learning_rate": 5.136783585575336e-05,
+ "loss": 1.3721,
+ "step": 1983
+ },
+ {
+ "epoch": 1.984558156981651,
+ "grad_norm": 0.6665890216827393,
+ "learning_rate": 5.127626898389075e-05,
+ "loss": 1.3245,
+ "step": 1984
+ },
+ {
+ "epoch": 1.985558438310775,
+ "grad_norm": 0.607271134853363,
+ "learning_rate": 5.118475565059691e-05,
+ "loss": 1.1436,
+ "step": 1985
+ },
+ {
+ "epoch": 1.9865587196398988,
+ "grad_norm": 0.6433306336402893,
+ "learning_rate": 5.109329595642829e-05,
+ "loss": 1.521,
+ "step": 1986
+ },
+ {
+ "epoch": 1.9875590009690225,
+ "grad_norm": 0.6605822443962097,
+ "learning_rate": 5.1001890001882734e-05,
+ "loss": 1.3559,
+ "step": 1987
+ },
+ {
+ "epoch": 1.9885592822981464,
+ "grad_norm": 0.6377484202384949,
+ "learning_rate": 5.091053788739878e-05,
+ "loss": 1.3607,
+ "step": 1988
+ },
+ {
+ "epoch": 1.98955956362727,
+ "grad_norm": 0.6457136273384094,
+ "learning_rate": 5.081923971335582e-05,
+ "loss": 1.2594,
+ "step": 1989
+ },
+ {
+ "epoch": 1.990559844956394,
+ "grad_norm": 0.6249425411224365,
+ "learning_rate": 5.072799558007415e-05,
+ "loss": 1.4896,
+ "step": 1990
+ },
+ {
+ "epoch": 1.9915601262855178,
+ "grad_norm": 0.6000842452049255,
+ "learning_rate": 5.063680558781445e-05,
+ "loss": 1.1787,
+ "step": 1991
+ },
+ {
+ "epoch": 1.9925604076146417,
+ "grad_norm": 0.5997797250747681,
+ "learning_rate": 5.0545669836778144e-05,
+ "loss": 1.4464,
+ "step": 1992
+ },
+ {
+ "epoch": 1.9935606889437656,
+ "grad_norm": 0.665529191493988,
+ "learning_rate": 5.045458842710684e-05,
+ "loss": 1.5382,
+ "step": 1993
+ },
+ {
+ "epoch": 1.9945609702728893,
+ "grad_norm": 0.6204399466514587,
+ "learning_rate": 5.036356145888263e-05,
+ "loss": 1.218,
+ "step": 1994
+ },
+ {
+ "epoch": 1.995561251602013,
+ "grad_norm": 0.6627510786056519,
+ "learning_rate": 5.0272589032127594e-05,
+ "loss": 1.5239,
+ "step": 1995
+ },
+ {
+ "epoch": 1.9965615329311368,
+ "grad_norm": 0.708292543888092,
+ "learning_rate": 5.0181671246804064e-05,
+ "loss": 1.4107,
+ "step": 1996
+ },
+ {
+ "epoch": 1.9975618142602607,
+ "grad_norm": 0.5863770842552185,
+ "learning_rate": 5.009080820281415e-05,
+ "loss": 1.1877,
+ "step": 1997
+ },
+ {
+ "epoch": 1.9985620955893846,
+ "grad_norm": 0.5950385928153992,
+ "learning_rate": 5.000000000000002e-05,
+ "loss": 1.2165,
+ "step": 1998
+ },
+ {
+ "epoch": 1.9995623769185085,
+ "grad_norm": 0.6033083200454712,
+ "learning_rate": 4.990924673814336e-05,
+ "loss": 1.3102,
+ "step": 1999
+ },
+ {
+ "epoch": 2.0005626582476324,
+ "grad_norm": 0.5440614223480225,
+ "learning_rate": 4.981854851696568e-05,
+ "loss": 1.0143,
+ "step": 2000
+ },
+ {
+ "epoch": 2.001562939576756,
+ "grad_norm": 0.6712716221809387,
+ "learning_rate": 4.972790543612783e-05,
+ "loss": 1.5009,
+ "step": 2001
+ },
+ {
+ "epoch": 2.0025632209058797,
+ "grad_norm": 0.6744182109832764,
+ "learning_rate": 4.963731759523022e-05,
+ "loss": 1.388,
+ "step": 2002
+ },
+ {
+ "epoch": 2.0035635022350036,
+ "grad_norm": 0.6512018442153931,
+ "learning_rate": 4.954678509381253e-05,
+ "loss": 1.1774,
+ "step": 2003
+ },
+ {
+ "epoch": 2.0045637835641275,
+ "grad_norm": 0.6344821453094482,
+ "learning_rate": 4.945630803135354e-05,
+ "loss": 1.2775,
+ "step": 2004
+ },
+ {
+ "epoch": 2.0055640648932513,
+ "grad_norm": 0.6289503574371338,
+ "learning_rate": 4.9365886507271243e-05,
+ "loss": 1.0352,
+ "step": 2005
+ },
+ {
+ "epoch": 2.0065643462223752,
+ "grad_norm": 0.6312137842178345,
+ "learning_rate": 4.9275520620922477e-05,
+ "loss": 1.4537,
+ "step": 2006
+ },
+ {
+ "epoch": 2.0075646275514987,
+ "grad_norm": 0.6128604412078857,
+ "learning_rate": 4.918521047160308e-05,
+ "loss": 1.1413,
+ "step": 2007
+ },
+ {
+ "epoch": 2.0085649088806226,
+ "grad_norm": 0.651336669921875,
+ "learning_rate": 4.9094956158547535e-05,
+ "loss": 1.1284,
+ "step": 2008
+ },
+ {
+ "epoch": 2.0095651902097464,
+ "grad_norm": 0.6941661834716797,
+ "learning_rate": 4.900475778092897e-05,
+ "loss": 1.3201,
+ "step": 2009
+ },
+ {
+ "epoch": 2.0105654715388703,
+ "grad_norm": 0.7407479882240295,
+ "learning_rate": 4.891461543785917e-05,
+ "loss": 1.3644,
+ "step": 2010
+ },
+ {
+ "epoch": 2.011565752867994,
+ "grad_norm": 0.673768162727356,
+ "learning_rate": 4.882452922838818e-05,
+ "loss": 1.3236,
+ "step": 2011
+ },
+ {
+ "epoch": 2.012566034197118,
+ "grad_norm": 0.6047630310058594,
+ "learning_rate": 4.873449925150455e-05,
+ "loss": 1.1617,
+ "step": 2012
+ },
+ {
+ "epoch": 2.0135663155262415,
+ "grad_norm": 0.5809116363525391,
+ "learning_rate": 4.864452560613485e-05,
+ "loss": 1.1697,
+ "step": 2013
+ },
+ {
+ "epoch": 2.0145665968553654,
+ "grad_norm": 0.7134872078895569,
+ "learning_rate": 4.855460839114392e-05,
+ "loss": 1.1404,
+ "step": 2014
+ },
+ {
+ "epoch": 2.0155668781844893,
+ "grad_norm": 0.6681728959083557,
+ "learning_rate": 4.846474770533446e-05,
+ "loss": 1.1199,
+ "step": 2015
+ },
+ {
+ "epoch": 2.016567159513613,
+ "grad_norm": 0.7536188364028931,
+ "learning_rate": 4.837494364744711e-05,
+ "loss": 1.2809,
+ "step": 2016
+ },
+ {
+ "epoch": 2.017567440842737,
+ "grad_norm": 0.748410701751709,
+ "learning_rate": 4.828519631616037e-05,
+ "loss": 1.1881,
+ "step": 2017
+ },
+ {
+ "epoch": 2.018567722171861,
+ "grad_norm": 0.6426956653594971,
+ "learning_rate": 4.8195505810090246e-05,
+ "loss": 1.1731,
+ "step": 2018
+ },
+ {
+ "epoch": 2.019568003500985,
+ "grad_norm": 0.6496251225471497,
+ "learning_rate": 4.810587222779043e-05,
+ "loss": 0.8958,
+ "step": 2019
+ },
+ {
+ "epoch": 2.0205682848301083,
+ "grad_norm": 0.5894680619239807,
+ "learning_rate": 4.801629566775196e-05,
+ "loss": 1.3354,
+ "step": 2020
+ },
+ {
+ "epoch": 2.021568566159232,
+ "grad_norm": 0.6889281868934631,
+ "learning_rate": 4.792677622840336e-05,
+ "loss": 1.3185,
+ "step": 2021
+ },
+ {
+ "epoch": 2.022568847488356,
+ "grad_norm": 0.7822543978691101,
+ "learning_rate": 4.783731400811022e-05,
+ "loss": 1.281,
+ "step": 2022
+ },
+ {
+ "epoch": 2.02356912881748,
+ "grad_norm": 0.5691697001457214,
+ "learning_rate": 4.774790910517541e-05,
+ "loss": 1.0311,
+ "step": 2023
+ },
+ {
+ "epoch": 2.024569410146604,
+ "grad_norm": 0.5973305702209473,
+ "learning_rate": 4.7658561617838684e-05,
+ "loss": 1.1829,
+ "step": 2024
+ },
+ {
+ "epoch": 2.0255696914757277,
+ "grad_norm": 0.6182019114494324,
+ "learning_rate": 4.756927164427685e-05,
+ "loss": 1.2656,
+ "step": 2025
+ },
+ {
+ "epoch": 2.026569972804851,
+ "grad_norm": 0.5737195611000061,
+ "learning_rate": 4.748003928260335e-05,
+ "loss": 1.2637,
+ "step": 2026
+ },
+ {
+ "epoch": 2.027570254133975,
+ "grad_norm": 0.6673151254653931,
+ "learning_rate": 4.73908646308685e-05,
+ "loss": 1.2574,
+ "step": 2027
+ },
+ {
+ "epoch": 2.028570535463099,
+ "grad_norm": 0.5862526893615723,
+ "learning_rate": 4.730174778705908e-05,
+ "loss": 1.2799,
+ "step": 2028
+ },
+ {
+ "epoch": 2.029570816792223,
+ "grad_norm": 0.5774174928665161,
+ "learning_rate": 4.721268884909833e-05,
+ "loss": 1.2472,
+ "step": 2029
+ },
+ {
+ "epoch": 2.0305710981213467,
+ "grad_norm": 0.6707743406295776,
+ "learning_rate": 4.712368791484597e-05,
+ "loss": 1.101,
+ "step": 2030
+ },
+ {
+ "epoch": 2.0315713794504706,
+ "grad_norm": 0.735133707523346,
+ "learning_rate": 4.703474508209793e-05,
+ "loss": 1.2771,
+ "step": 2031
+ },
+ {
+ "epoch": 2.032571660779594,
+ "grad_norm": 0.8130558729171753,
+ "learning_rate": 4.694586044858633e-05,
+ "loss": 1.1673,
+ "step": 2032
+ },
+ {
+ "epoch": 2.033571942108718,
+ "grad_norm": 0.6132652163505554,
+ "learning_rate": 4.6857034111979235e-05,
+ "loss": 1.1806,
+ "step": 2033
+ },
+ {
+ "epoch": 2.034572223437842,
+ "grad_norm": 0.6277550458908081,
+ "learning_rate": 4.6768266169880804e-05,
+ "loss": 1.2887,
+ "step": 2034
+ },
+ {
+ "epoch": 2.0355725047669657,
+ "grad_norm": 0.744972288608551,
+ "learning_rate": 4.66795567198309e-05,
+ "loss": 1.2582,
+ "step": 2035
+ },
+ {
+ "epoch": 2.0365727860960896,
+ "grad_norm": 1.3557803630828857,
+ "learning_rate": 4.6590905859305135e-05,
+ "loss": 1.1926,
+ "step": 2036
+ },
+ {
+ "epoch": 2.0375730674252135,
+ "grad_norm": 0.6581830978393555,
+ "learning_rate": 4.650231368571486e-05,
+ "loss": 1.1849,
+ "step": 2037
+ },
+ {
+ "epoch": 2.038573348754337,
+ "grad_norm": 0.8084182739257812,
+ "learning_rate": 4.6413780296406764e-05,
+ "loss": 1.3104,
+ "step": 2038
+ },
+ {
+ "epoch": 2.039573630083461,
+ "grad_norm": 0.7013028860092163,
+ "learning_rate": 4.6325305788663096e-05,
+ "loss": 1.4023,
+ "step": 2039
+ },
+ {
+ "epoch": 2.0405739114125847,
+ "grad_norm": 0.5487765073776245,
+ "learning_rate": 4.623689025970128e-05,
+ "loss": 1.1617,
+ "step": 2040
+ },
+ {
+ "epoch": 2.0415741927417086,
+ "grad_norm": 0.6320502758026123,
+ "learning_rate": 4.6148533806674074e-05,
+ "loss": 1.1965,
+ "step": 2041
+ },
+ {
+ "epoch": 2.0425744740708325,
+ "grad_norm": 0.6310489177703857,
+ "learning_rate": 4.606023652666915e-05,
+ "loss": 1.3635,
+ "step": 2042
+ },
+ {
+ "epoch": 2.0435747553999564,
+ "grad_norm": 0.6365699768066406,
+ "learning_rate": 4.597199851670932e-05,
+ "loss": 1.4671,
+ "step": 2043
+ },
+ {
+ "epoch": 2.0445750367290803,
+ "grad_norm": 0.6468609571456909,
+ "learning_rate": 4.5883819873752156e-05,
+ "loss": 1.3253,
+ "step": 2044
+ },
+ {
+ "epoch": 2.0455753180582037,
+ "grad_norm": 0.6045584678649902,
+ "learning_rate": 4.5795700694690046e-05,
+ "loss": 1.4587,
+ "step": 2045
+ },
+ {
+ "epoch": 2.0465755993873276,
+ "grad_norm": 0.6662184596061707,
+ "learning_rate": 4.5707641076350074e-05,
+ "loss": 1.2276,
+ "step": 2046
+ },
+ {
+ "epoch": 2.0475758807164515,
+ "grad_norm": 0.7220503687858582,
+ "learning_rate": 4.5619641115493774e-05,
+ "loss": 1.259,
+ "step": 2047
+ },
+ {
+ "epoch": 2.0485761620455754,
+ "grad_norm": 0.6610327959060669,
+ "learning_rate": 4.553170090881724e-05,
+ "loss": 1.4487,
+ "step": 2048
+ },
+ {
+ "epoch": 2.0495764433746992,
+ "grad_norm": 0.7678585052490234,
+ "learning_rate": 4.54438205529508e-05,
+ "loss": 1.205,
+ "step": 2049
+ },
+ {
+ "epoch": 2.050576724703823,
+ "grad_norm": 1.5810719728469849,
+ "learning_rate": 4.535600014445914e-05,
+ "loss": 1.392,
+ "step": 2050
+ },
+ {
+ "epoch": 2.0515770060329466,
+ "grad_norm": 0.5487958192825317,
+ "learning_rate": 4.5268239779840935e-05,
+ "loss": 1.2389,
+ "step": 2051
+ },
+ {
+ "epoch": 2.0525772873620705,
+ "grad_norm": 0.6395775675773621,
+ "learning_rate": 4.518053955552903e-05,
+ "loss": 1.3683,
+ "step": 2052
+ },
+ {
+ "epoch": 2.0535775686911943,
+ "grad_norm": 0.5679852366447449,
+ "learning_rate": 4.5092899567890035e-05,
+ "loss": 1.1907,
+ "step": 2053
+ },
+ {
+ "epoch": 2.0545778500203182,
+ "grad_norm": 0.6236241459846497,
+ "learning_rate": 4.5005319913224506e-05,
+ "loss": 1.1334,
+ "step": 2054
+ },
+ {
+ "epoch": 2.055578131349442,
+ "grad_norm": 0.7079814672470093,
+ "learning_rate": 4.491780068776663e-05,
+ "loss": 1.2638,
+ "step": 2055
+ },
+ {
+ "epoch": 2.056578412678566,
+ "grad_norm": 0.6677514910697937,
+ "learning_rate": 4.4830341987684166e-05,
+ "loss": 1.11,
+ "step": 2056
+ },
+ {
+ "epoch": 2.0575786940076894,
+ "grad_norm": 0.7721594572067261,
+ "learning_rate": 4.474294390907847e-05,
+ "loss": 1.2271,
+ "step": 2057
+ },
+ {
+ "epoch": 2.0585789753368133,
+ "grad_norm": 1.6144977807998657,
+ "learning_rate": 4.465560654798417e-05,
+ "loss": 1.2506,
+ "step": 2058
+ },
+ {
+ "epoch": 2.059579256665937,
+ "grad_norm": 0.7923681735992432,
+ "learning_rate": 4.4568330000369286e-05,
+ "loss": 1.1595,
+ "step": 2059
+ },
+ {
+ "epoch": 2.060579537995061,
+ "grad_norm": 0.6133561730384827,
+ "learning_rate": 4.448111436213486e-05,
+ "loss": 1.2743,
+ "step": 2060
+ },
+ {
+ "epoch": 2.061579819324185,
+ "grad_norm": 0.6275729537010193,
+ "learning_rate": 4.4393959729115244e-05,
+ "loss": 1.2725,
+ "step": 2061
+ },
+ {
+ "epoch": 2.062580100653309,
+ "grad_norm": 0.6153738498687744,
+ "learning_rate": 4.4306866197077544e-05,
+ "loss": 1.1801,
+ "step": 2062
+ },
+ {
+ "epoch": 2.0635803819824328,
+ "grad_norm": 0.638670027256012,
+ "learning_rate": 4.421983386172178e-05,
+ "loss": 1.2986,
+ "step": 2063
+ },
+ {
+ "epoch": 2.064580663311556,
+ "grad_norm": 0.6230247616767883,
+ "learning_rate": 4.413286281868081e-05,
+ "loss": 1.2258,
+ "step": 2064
+ },
+ {
+ "epoch": 2.06558094464068,
+ "grad_norm": 0.6063693761825562,
+ "learning_rate": 4.404595316352002e-05,
+ "loss": 1.3707,
+ "step": 2065
+ },
+ {
+ "epoch": 2.066581225969804,
+ "grad_norm": 0.5972939729690552,
+ "learning_rate": 4.3959104991737455e-05,
+ "loss": 1.256,
+ "step": 2066
+ },
+ {
+ "epoch": 2.067581507298928,
+ "grad_norm": 0.679032564163208,
+ "learning_rate": 4.387231839876349e-05,
+ "loss": 1.3107,
+ "step": 2067
+ },
+ {
+ "epoch": 2.0685817886280518,
+ "grad_norm": 0.6892005801200867,
+ "learning_rate": 4.3785593479960964e-05,
+ "loss": 1.393,
+ "step": 2068
+ },
+ {
+ "epoch": 2.0695820699571756,
+ "grad_norm": 0.5849416851997375,
+ "learning_rate": 4.369893033062481e-05,
+ "loss": 1.1333,
+ "step": 2069
+ },
+ {
+ "epoch": 2.070582351286299,
+ "grad_norm": 0.6160491108894348,
+ "learning_rate": 4.3612329045982236e-05,
+ "loss": 1.3765,
+ "step": 2070
+ },
+ {
+ "epoch": 2.071582632615423,
+ "grad_norm": 0.6175869703292847,
+ "learning_rate": 4.35257897211923e-05,
+ "loss": 1.383,
+ "step": 2071
+ },
+ {
+ "epoch": 2.072582913944547,
+ "grad_norm": 0.6659654378890991,
+ "learning_rate": 4.343931245134616e-05,
+ "loss": 1.3973,
+ "step": 2072
+ },
+ {
+ "epoch": 2.0735831952736707,
+ "grad_norm": 0.6890555620193481,
+ "learning_rate": 4.335289733146665e-05,
+ "loss": 1.2443,
+ "step": 2073
+ },
+ {
+ "epoch": 2.0745834766027946,
+ "grad_norm": 0.5023195743560791,
+ "learning_rate": 4.326654445650833e-05,
+ "loss": 1.2592,
+ "step": 2074
+ },
+ {
+ "epoch": 2.0755837579319185,
+ "grad_norm": 0.6107515096664429,
+ "learning_rate": 4.3180253921357414e-05,
+ "loss": 1.2949,
+ "step": 2075
+ },
+ {
+ "epoch": 2.076584039261042,
+ "grad_norm": 0.6276577711105347,
+ "learning_rate": 4.309402582083161e-05,
+ "loss": 1.2398,
+ "step": 2076
+ },
+ {
+ "epoch": 2.077584320590166,
+ "grad_norm": 0.6834983825683594,
+ "learning_rate": 4.300786024968003e-05,
+ "loss": 1.213,
+ "step": 2077
+ },
+ {
+ "epoch": 2.0785846019192897,
+ "grad_norm": 0.6413993239402771,
+ "learning_rate": 4.2921757302583e-05,
+ "loss": 1.1989,
+ "step": 2078
+ },
+ {
+ "epoch": 2.0795848832484136,
+ "grad_norm": 0.7132616639137268,
+ "learning_rate": 4.283571707415214e-05,
+ "loss": 1.5454,
+ "step": 2079
+ },
+ {
+ "epoch": 2.0805851645775375,
+ "grad_norm": 0.590647280216217,
+ "learning_rate": 4.274973965893003e-05,
+ "loss": 1.0762,
+ "step": 2080
+ },
+ {
+ "epoch": 2.0815854459066614,
+ "grad_norm": 0.6809400916099548,
+ "learning_rate": 4.266382515139039e-05,
+ "loss": 1.205,
+ "step": 2081
+ },
+ {
+ "epoch": 2.0825857272357853,
+ "grad_norm": 0.6614822745323181,
+ "learning_rate": 4.2577973645937674e-05,
+ "loss": 1.3823,
+ "step": 2082
+ },
+ {
+ "epoch": 2.0835860085649087,
+ "grad_norm": 0.6928898096084595,
+ "learning_rate": 4.2492185236907125e-05,
+ "loss": 1.3693,
+ "step": 2083
+ },
+ {
+ "epoch": 2.0845862898940326,
+ "grad_norm": 0.6004021167755127,
+ "learning_rate": 4.2406460018564765e-05,
+ "loss": 1.3204,
+ "step": 2084
+ },
+ {
+ "epoch": 2.0855865712231565,
+ "grad_norm": 0.5938641428947449,
+ "learning_rate": 4.2320798085107036e-05,
+ "loss": 1.1583,
+ "step": 2085
+ },
+ {
+ "epoch": 2.0865868525522804,
+ "grad_norm": 0.6949739456176758,
+ "learning_rate": 4.223519953066099e-05,
+ "loss": 1.2057,
+ "step": 2086
+ },
+ {
+ "epoch": 2.0875871338814043,
+ "grad_norm": 0.6152746081352234,
+ "learning_rate": 4.214966444928387e-05,
+ "loss": 1.3205,
+ "step": 2087
+ },
+ {
+ "epoch": 2.088587415210528,
+ "grad_norm": 0.6244688630104065,
+ "learning_rate": 4.206419293496333e-05,
+ "loss": 1.2569,
+ "step": 2088
+ },
+ {
+ "epoch": 2.0895876965396516,
+ "grad_norm": 0.6392861008644104,
+ "learning_rate": 4.1978785081617057e-05,
+ "loss": 1.4032,
+ "step": 2089
+ },
+ {
+ "epoch": 2.0905879778687755,
+ "grad_norm": 0.6255760192871094,
+ "learning_rate": 4.1893440983092856e-05,
+ "loss": 1.3172,
+ "step": 2090
+ },
+ {
+ "epoch": 2.0915882591978994,
+ "grad_norm": 0.5918805599212646,
+ "learning_rate": 4.18081607331685e-05,
+ "loss": 1.1431,
+ "step": 2091
+ },
+ {
+ "epoch": 2.0925885405270233,
+ "grad_norm": 0.7690648436546326,
+ "learning_rate": 4.172294442555148e-05,
+ "loss": 1.4107,
+ "step": 2092
+ },
+ {
+ "epoch": 2.093588821856147,
+ "grad_norm": 0.6123422384262085,
+ "learning_rate": 4.1637792153879196e-05,
+ "loss": 1.1535,
+ "step": 2093
+ },
+ {
+ "epoch": 2.094589103185271,
+ "grad_norm": 0.6413111090660095,
+ "learning_rate": 4.15527040117185e-05,
+ "loss": 1.1825,
+ "step": 2094
+ },
+ {
+ "epoch": 2.0955893845143945,
+ "grad_norm": 0.7032740712165833,
+ "learning_rate": 4.146768009256595e-05,
+ "loss": 1.3696,
+ "step": 2095
+ },
+ {
+ "epoch": 2.0965896658435184,
+ "grad_norm": 0.6340607404708862,
+ "learning_rate": 4.13827204898474e-05,
+ "loss": 1.2627,
+ "step": 2096
+ },
+ {
+ "epoch": 2.0975899471726422,
+ "grad_norm": 0.5844863057136536,
+ "learning_rate": 4.129782529691815e-05,
+ "loss": 1.228,
+ "step": 2097
+ },
+ {
+ "epoch": 2.098590228501766,
+ "grad_norm": 0.5953666567802429,
+ "learning_rate": 4.1212994607062594e-05,
+ "loss": 1.3636,
+ "step": 2098
+ },
+ {
+ "epoch": 2.09959050983089,
+ "grad_norm": 0.6116781234741211,
+ "learning_rate": 4.1128228513494385e-05,
+ "loss": 1.3428,
+ "step": 2099
+ },
+ {
+ "epoch": 2.100590791160014,
+ "grad_norm": 0.5654290914535522,
+ "learning_rate": 4.1043527109356095e-05,
+ "loss": 1.3565,
+ "step": 2100
+ },
+ {
+ "epoch": 2.1015910724891373,
+ "grad_norm": 0.6171858906745911,
+ "learning_rate": 4.095889048771922e-05,
+ "loss": 1.4972,
+ "step": 2101
+ },
+ {
+ "epoch": 2.1025913538182612,
+ "grad_norm": 0.6021595001220703,
+ "learning_rate": 4.087431874158416e-05,
+ "loss": 1.2339,
+ "step": 2102
+ },
+ {
+ "epoch": 2.103591635147385,
+ "grad_norm": 0.6666091084480286,
+ "learning_rate": 4.0789811963879906e-05,
+ "loss": 1.2795,
+ "step": 2103
+ },
+ {
+ "epoch": 2.104591916476509,
+ "grad_norm": 0.708927571773529,
+ "learning_rate": 4.070537024746416e-05,
+ "loss": 1.2198,
+ "step": 2104
+ },
+ {
+ "epoch": 2.105592197805633,
+ "grad_norm": 0.6773338913917542,
+ "learning_rate": 4.06209936851231e-05,
+ "loss": 1.1575,
+ "step": 2105
+ },
+ {
+ "epoch": 2.106592479134757,
+ "grad_norm": 0.5854949951171875,
+ "learning_rate": 4.053668236957134e-05,
+ "loss": 1.1772,
+ "step": 2106
+ },
+ {
+ "epoch": 2.1075927604638807,
+ "grad_norm": 0.6273494362831116,
+ "learning_rate": 4.0452436393451735e-05,
+ "loss": 1.0734,
+ "step": 2107
+ },
+ {
+ "epoch": 2.108593041793004,
+ "grad_norm": 0.5946929454803467,
+ "learning_rate": 4.036825584933533e-05,
+ "loss": 1.3508,
+ "step": 2108
+ },
+ {
+ "epoch": 2.109593323122128,
+ "grad_norm": 0.696772575378418,
+ "learning_rate": 4.028414082972141e-05,
+ "loss": 1.4982,
+ "step": 2109
+ },
+ {
+ "epoch": 2.110593604451252,
+ "grad_norm": 0.5967583656311035,
+ "learning_rate": 4.020009142703708e-05,
+ "loss": 1.2575,
+ "step": 2110
+ },
+ {
+ "epoch": 2.1115938857803758,
+ "grad_norm": 0.6711663603782654,
+ "learning_rate": 4.011610773363751e-05,
+ "loss": 1.4659,
+ "step": 2111
+ },
+ {
+ "epoch": 2.1125941671094997,
+ "grad_norm": 0.7978873252868652,
+ "learning_rate": 4.003218984180552e-05,
+ "loss": 1.3647,
+ "step": 2112
+ },
+ {
+ "epoch": 2.1135944484386235,
+ "grad_norm": 0.5708624720573425,
+ "learning_rate": 3.994833784375177e-05,
+ "loss": 1.2855,
+ "step": 2113
+ },
+ {
+ "epoch": 2.114594729767747,
+ "grad_norm": 0.6096190810203552,
+ "learning_rate": 3.986455183161437e-05,
+ "loss": 1.2204,
+ "step": 2114
+ },
+ {
+ "epoch": 2.115595011096871,
+ "grad_norm": 0.5597400665283203,
+ "learning_rate": 3.978083189745907e-05,
+ "loss": 1.1564,
+ "step": 2115
+ },
+ {
+ "epoch": 2.1165952924259948,
+ "grad_norm": 0.6057879328727722,
+ "learning_rate": 3.9697178133278855e-05,
+ "loss": 1.4124,
+ "step": 2116
+ },
+ {
+ "epoch": 2.1175955737551186,
+ "grad_norm": 0.6057745814323425,
+ "learning_rate": 3.961359063099416e-05,
+ "loss": 1.2136,
+ "step": 2117
+ },
+ {
+ "epoch": 2.1185958550842425,
+ "grad_norm": 0.589461088180542,
+ "learning_rate": 3.953006948245247e-05,
+ "loss": 1.0236,
+ "step": 2118
+ },
+ {
+ "epoch": 2.1195961364133664,
+ "grad_norm": 0.6324148774147034,
+ "learning_rate": 3.944661477942844e-05,
+ "loss": 1.1293,
+ "step": 2119
+ },
+ {
+ "epoch": 2.1205964177424903,
+ "grad_norm": 0.6559157967567444,
+ "learning_rate": 3.9363226613623736e-05,
+ "loss": 1.1493,
+ "step": 2120
+ },
+ {
+ "epoch": 2.1215966990716137,
+ "grad_norm": 0.616329550743103,
+ "learning_rate": 3.9279905076666826e-05,
+ "loss": 1.2079,
+ "step": 2121
+ },
+ {
+ "epoch": 2.1225969804007376,
+ "grad_norm": 0.9661369919776917,
+ "learning_rate": 3.9196650260113044e-05,
+ "loss": 1.307,
+ "step": 2122
+ },
+ {
+ "epoch": 2.1235972617298615,
+ "grad_norm": 0.7038710713386536,
+ "learning_rate": 3.9113462255444334e-05,
+ "loss": 1.2016,
+ "step": 2123
+ },
+ {
+ "epoch": 2.1245975430589854,
+ "grad_norm": 0.7221149802207947,
+ "learning_rate": 3.903034115406931e-05,
+ "loss": 1.1263,
+ "step": 2124
+ },
+ {
+ "epoch": 2.1255978243881093,
+ "grad_norm": 0.5545951128005981,
+ "learning_rate": 3.8947287047323e-05,
+ "loss": 1.014,
+ "step": 2125
+ },
+ {
+ "epoch": 2.126598105717233,
+ "grad_norm": 0.7009350657463074,
+ "learning_rate": 3.886430002646688e-05,
+ "loss": 1.3541,
+ "step": 2126
+ },
+ {
+ "epoch": 2.1275983870463566,
+ "grad_norm": 0.7145485281944275,
+ "learning_rate": 3.878138018268866e-05,
+ "loss": 1.2772,
+ "step": 2127
+ },
+ {
+ "epoch": 2.1285986683754805,
+ "grad_norm": 0.5328482985496521,
+ "learning_rate": 3.869852760710222e-05,
+ "loss": 1.536,
+ "step": 2128
+ },
+ {
+ "epoch": 2.1295989497046044,
+ "grad_norm": 0.6539137363433838,
+ "learning_rate": 3.861574239074762e-05,
+ "loss": 1.1556,
+ "step": 2129
+ },
+ {
+ "epoch": 2.1305992310337283,
+ "grad_norm": 0.7259630560874939,
+ "learning_rate": 3.8533024624590776e-05,
+ "loss": 1.4835,
+ "step": 2130
+ },
+ {
+ "epoch": 2.131599512362852,
+ "grad_norm": 0.6348355412483215,
+ "learning_rate": 3.845037439952362e-05,
+ "loss": 1.2484,
+ "step": 2131
+ },
+ {
+ "epoch": 2.132599793691976,
+ "grad_norm": 0.5768122673034668,
+ "learning_rate": 3.836779180636373e-05,
+ "loss": 1.1329,
+ "step": 2132
+ },
+ {
+ "epoch": 2.1336000750210995,
+ "grad_norm": 0.6438059210777283,
+ "learning_rate": 3.828527693585451e-05,
+ "loss": 1.2199,
+ "step": 2133
+ },
+ {
+ "epoch": 2.1346003563502234,
+ "grad_norm": 0.631417989730835,
+ "learning_rate": 3.8202829878664816e-05,
+ "loss": 1.1399,
+ "step": 2134
+ },
+ {
+ "epoch": 2.1356006376793473,
+ "grad_norm": 0.6081330180168152,
+ "learning_rate": 3.812045072538909e-05,
+ "loss": 1.2772,
+ "step": 2135
+ },
+ {
+ "epoch": 2.136600919008471,
+ "grad_norm": 0.5720836520195007,
+ "learning_rate": 3.8038139566547146e-05,
+ "loss": 1.1365,
+ "step": 2136
+ },
+ {
+ "epoch": 2.137601200337595,
+ "grad_norm": 0.7470606565475464,
+ "learning_rate": 3.7955896492584e-05,
+ "loss": 1.2519,
+ "step": 2137
+ },
+ {
+ "epoch": 2.138601481666719,
+ "grad_norm": 0.6497467160224915,
+ "learning_rate": 3.787372159386999e-05,
+ "loss": 1.4294,
+ "step": 2138
+ },
+ {
+ "epoch": 2.1396017629958424,
+ "grad_norm": 0.7447611093521118,
+ "learning_rate": 3.7791614960700395e-05,
+ "loss": 1.2722,
+ "step": 2139
+ },
+ {
+ "epoch": 2.1406020443249663,
+ "grad_norm": 0.7185319066047668,
+ "learning_rate": 3.770957668329562e-05,
+ "loss": 1.436,
+ "step": 2140
+ },
+ {
+ "epoch": 2.14160232565409,
+ "grad_norm": 0.5746670365333557,
+ "learning_rate": 3.7627606851800837e-05,
+ "loss": 1.0138,
+ "step": 2141
+ },
+ {
+ "epoch": 2.142602606983214,
+ "grad_norm": 0.568533182144165,
+ "learning_rate": 3.7545705556286126e-05,
+ "loss": 1.4015,
+ "step": 2142
+ },
+ {
+ "epoch": 2.143602888312338,
+ "grad_norm": 0.5806247591972351,
+ "learning_rate": 3.746387288674613e-05,
+ "loss": 1.2511,
+ "step": 2143
+ },
+ {
+ "epoch": 2.144603169641462,
+ "grad_norm": 0.6733903288841248,
+ "learning_rate": 3.7382108933100234e-05,
+ "loss": 1.4226,
+ "step": 2144
+ },
+ {
+ "epoch": 2.1456034509705857,
+ "grad_norm": 0.6690333485603333,
+ "learning_rate": 3.730041378519216e-05,
+ "loss": 1.4497,
+ "step": 2145
+ },
+ {
+ "epoch": 2.146603732299709,
+ "grad_norm": 0.6218042969703674,
+ "learning_rate": 3.721878753279017e-05,
+ "loss": 1.3474,
+ "step": 2146
+ },
+ {
+ "epoch": 2.147604013628833,
+ "grad_norm": 0.5649903416633606,
+ "learning_rate": 3.713723026558671e-05,
+ "loss": 1.175,
+ "step": 2147
+ },
+ {
+ "epoch": 2.148604294957957,
+ "grad_norm": 0.6123588681221008,
+ "learning_rate": 3.705574207319844e-05,
+ "loss": 1.3606,
+ "step": 2148
+ },
+ {
+ "epoch": 2.149604576287081,
+ "grad_norm": 0.7542588710784912,
+ "learning_rate": 3.697432304516618e-05,
+ "loss": 1.3657,
+ "step": 2149
+ },
+ {
+ "epoch": 2.1506048576162047,
+ "grad_norm": 0.695570170879364,
+ "learning_rate": 3.689297327095472e-05,
+ "loss": 1.2842,
+ "step": 2150
+ },
+ {
+ "epoch": 2.1516051389453286,
+ "grad_norm": 0.6117271780967712,
+ "learning_rate": 3.681169283995279e-05,
+ "loss": 1.2964,
+ "step": 2151
+ },
+ {
+ "epoch": 2.152605420274452,
+ "grad_norm": 0.6640071272850037,
+ "learning_rate": 3.673048184147281e-05,
+ "loss": 1.3073,
+ "step": 2152
+ },
+ {
+ "epoch": 2.153605701603576,
+ "grad_norm": 0.7579565644264221,
+ "learning_rate": 3.664934036475104e-05,
+ "loss": 1.309,
+ "step": 2153
+ },
+ {
+ "epoch": 2.1546059829327,
+ "grad_norm": 0.6235459446907043,
+ "learning_rate": 3.656826849894726e-05,
+ "loss": 1.11,
+ "step": 2154
+ },
+ {
+ "epoch": 2.1556062642618237,
+ "grad_norm": 0.5683732628822327,
+ "learning_rate": 3.648726633314475e-05,
+ "loss": 1.2784,
+ "step": 2155
+ },
+ {
+ "epoch": 2.1566065455909476,
+ "grad_norm": 0.8360922932624817,
+ "learning_rate": 3.640633395635032e-05,
+ "loss": 1.1362,
+ "step": 2156
+ },
+ {
+ "epoch": 2.1576068269200714,
+ "grad_norm": 0.5839766263961792,
+ "learning_rate": 3.632547145749395e-05,
+ "loss": 1.2103,
+ "step": 2157
+ },
+ {
+ "epoch": 2.158607108249195,
+ "grad_norm": 0.6480291485786438,
+ "learning_rate": 3.624467892542895e-05,
+ "loss": 1.0706,
+ "step": 2158
+ },
+ {
+ "epoch": 2.1596073895783188,
+ "grad_norm": 0.5837631821632385,
+ "learning_rate": 3.616395644893166e-05,
+ "loss": 1.2327,
+ "step": 2159
+ },
+ {
+ "epoch": 2.1606076709074427,
+ "grad_norm": 0.6498702168464661,
+ "learning_rate": 3.6083304116701535e-05,
+ "loss": 1.2411,
+ "step": 2160
+ },
+ {
+ "epoch": 2.1616079522365665,
+ "grad_norm": 0.7082285284996033,
+ "learning_rate": 3.600272201736082e-05,
+ "loss": 1.288,
+ "step": 2161
+ },
+ {
+ "epoch": 2.1626082335656904,
+ "grad_norm": 0.7018481492996216,
+ "learning_rate": 3.5922210239454764e-05,
+ "loss": 1.2828,
+ "step": 2162
+ },
+ {
+ "epoch": 2.1636085148948143,
+ "grad_norm": 0.6113139390945435,
+ "learning_rate": 3.5841768871451185e-05,
+ "loss": 1.341,
+ "step": 2163
+ },
+ {
+ "epoch": 2.1646087962239378,
+ "grad_norm": 0.5757367014884949,
+ "learning_rate": 3.57613980017406e-05,
+ "loss": 1.4316,
+ "step": 2164
+ },
+ {
+ "epoch": 2.1656090775530616,
+ "grad_norm": 0.7200785875320435,
+ "learning_rate": 3.568109771863613e-05,
+ "loss": 1.304,
+ "step": 2165
+ },
+ {
+ "epoch": 2.1666093588821855,
+ "grad_norm": 0.5863134264945984,
+ "learning_rate": 3.560086811037316e-05,
+ "loss": 1.2712,
+ "step": 2166
+ },
+ {
+ "epoch": 2.1676096402113094,
+ "grad_norm": 0.6226006746292114,
+ "learning_rate": 3.552070926510962e-05,
+ "loss": 1.2146,
+ "step": 2167
+ },
+ {
+ "epoch": 2.1686099215404333,
+ "grad_norm": 0.6331388354301453,
+ "learning_rate": 3.54406212709255e-05,
+ "loss": 1.3175,
+ "step": 2168
+ },
+ {
+ "epoch": 2.169610202869557,
+ "grad_norm": 0.6935258507728577,
+ "learning_rate": 3.536060421582309e-05,
+ "loss": 1.3687,
+ "step": 2169
+ },
+ {
+ "epoch": 2.170610484198681,
+ "grad_norm": 0.6996686458587646,
+ "learning_rate": 3.52806581877266e-05,
+ "loss": 1.4386,
+ "step": 2170
+ },
+ {
+ "epoch": 2.1716107655278045,
+ "grad_norm": 0.695606529712677,
+ "learning_rate": 3.520078327448232e-05,
+ "loss": 1.1679,
+ "step": 2171
+ },
+ {
+ "epoch": 2.1726110468569284,
+ "grad_norm": 0.6625364422798157,
+ "learning_rate": 3.5120979563858266e-05,
+ "loss": 1.2364,
+ "step": 2172
+ },
+ {
+ "epoch": 2.1736113281860523,
+ "grad_norm": 0.5756232142448425,
+ "learning_rate": 3.5041247143544364e-05,
+ "loss": 1.2565,
+ "step": 2173
+ },
+ {
+ "epoch": 2.174611609515176,
+ "grad_norm": 0.7229663729667664,
+ "learning_rate": 3.496158610115207e-05,
+ "loss": 1.2473,
+ "step": 2174
+ },
+ {
+ "epoch": 2.1756118908443,
+ "grad_norm": 0.6331142783164978,
+ "learning_rate": 3.4881996524214445e-05,
+ "loss": 1.2712,
+ "step": 2175
+ },
+ {
+ "epoch": 2.176612172173424,
+ "grad_norm": 0.7331680655479431,
+ "learning_rate": 3.48024785001861e-05,
+ "loss": 1.3736,
+ "step": 2176
+ },
+ {
+ "epoch": 2.1776124535025474,
+ "grad_norm": 0.745099663734436,
+ "learning_rate": 3.472303211644289e-05,
+ "loss": 1.3546,
+ "step": 2177
+ },
+ {
+ "epoch": 2.1786127348316713,
+ "grad_norm": 0.5532922148704529,
+ "learning_rate": 3.464365746028208e-05,
+ "loss": 1.2695,
+ "step": 2178
+ },
+ {
+ "epoch": 2.179613016160795,
+ "grad_norm": 0.671463131904602,
+ "learning_rate": 3.456435461892203e-05,
+ "loss": 1.1332,
+ "step": 2179
+ },
+ {
+ "epoch": 2.180613297489919,
+ "grad_norm": 0.6440550088882446,
+ "learning_rate": 3.4485123679502274e-05,
+ "loss": 1.2519,
+ "step": 2180
+ },
+ {
+ "epoch": 2.181613578819043,
+ "grad_norm": 0.5422050952911377,
+ "learning_rate": 3.4405964729083254e-05,
+ "loss": 1.3829,
+ "step": 2181
+ },
+ {
+ "epoch": 2.182613860148167,
+ "grad_norm": 0.5920416712760925,
+ "learning_rate": 3.43268778546463e-05,
+ "loss": 1.2442,
+ "step": 2182
+ },
+ {
+ "epoch": 2.1836141414772907,
+ "grad_norm": 0.5842954516410828,
+ "learning_rate": 3.424786314309365e-05,
+ "loss": 1.1633,
+ "step": 2183
+ },
+ {
+ "epoch": 2.184614422806414,
+ "grad_norm": 0.6093557476997375,
+ "learning_rate": 3.416892068124812e-05,
+ "loss": 1.3949,
+ "step": 2184
+ },
+ {
+ "epoch": 2.185614704135538,
+ "grad_norm": 0.6957736015319824,
+ "learning_rate": 3.409005055585327e-05,
+ "loss": 1.3018,
+ "step": 2185
+ },
+ {
+ "epoch": 2.186614985464662,
+ "grad_norm": 0.6803373098373413,
+ "learning_rate": 3.401125285357302e-05,
+ "loss": 1.1555,
+ "step": 2186
+ },
+ {
+ "epoch": 2.187615266793786,
+ "grad_norm": 0.6370871067047119,
+ "learning_rate": 3.393252766099187e-05,
+ "loss": 1.3282,
+ "step": 2187
+ },
+ {
+ "epoch": 2.1886155481229097,
+ "grad_norm": 0.6632930040359497,
+ "learning_rate": 3.3853875064614515e-05,
+ "loss": 1.3476,
+ "step": 2188
+ },
+ {
+ "epoch": 2.1896158294520336,
+ "grad_norm": 0.6875565648078918,
+ "learning_rate": 3.377529515086598e-05,
+ "loss": 1.1416,
+ "step": 2189
+ },
+ {
+ "epoch": 2.190616110781157,
+ "grad_norm": 0.6737555265426636,
+ "learning_rate": 3.369678800609134e-05,
+ "loss": 1.2395,
+ "step": 2190
+ },
+ {
+ "epoch": 2.191616392110281,
+ "grad_norm": 0.6693467497825623,
+ "learning_rate": 3.361835371655578e-05,
+ "loss": 1.2718,
+ "step": 2191
+ },
+ {
+ "epoch": 2.192616673439405,
+ "grad_norm": 0.6017836928367615,
+ "learning_rate": 3.353999236844436e-05,
+ "loss": 1.1916,
+ "step": 2192
+ },
+ {
+ "epoch": 2.1936169547685287,
+ "grad_norm": 0.6228927969932556,
+ "learning_rate": 3.3461704047862054e-05,
+ "loss": 1.3403,
+ "step": 2193
+ },
+ {
+ "epoch": 2.1946172360976526,
+ "grad_norm": 0.5871831178665161,
+ "learning_rate": 3.33834888408336e-05,
+ "loss": 1.1762,
+ "step": 2194
+ },
+ {
+ "epoch": 2.1956175174267765,
+ "grad_norm": 0.6408618688583374,
+ "learning_rate": 3.3305346833303296e-05,
+ "loss": 1.4061,
+ "step": 2195
+ },
+ {
+ "epoch": 2.1966177987559,
+ "grad_norm": 0.6841285824775696,
+ "learning_rate": 3.322727811113516e-05,
+ "loss": 1.4666,
+ "step": 2196
+ },
+ {
+ "epoch": 2.197618080085024,
+ "grad_norm": 0.5706741809844971,
+ "learning_rate": 3.314928276011251e-05,
+ "loss": 1.3368,
+ "step": 2197
+ },
+ {
+ "epoch": 2.1986183614141477,
+ "grad_norm": 0.6349079012870789,
+ "learning_rate": 3.307136086593821e-05,
+ "loss": 1.2458,
+ "step": 2198
+ },
+ {
+ "epoch": 2.1996186427432716,
+ "grad_norm": 0.6701605916023254,
+ "learning_rate": 3.299351251423426e-05,
+ "loss": 1.094,
+ "step": 2199
+ },
+ {
+ "epoch": 2.2006189240723955,
+ "grad_norm": 0.706358015537262,
+ "learning_rate": 3.291573779054199e-05,
+ "loss": 1.4859,
+ "step": 2200
+ },
+ {
+ "epoch": 2.2016192054015193,
+ "grad_norm": 0.6330355405807495,
+ "learning_rate": 3.2838036780321715e-05,
+ "loss": 1.4211,
+ "step": 2201
+ },
+ {
+ "epoch": 2.202619486730643,
+ "grad_norm": 0.6275076866149902,
+ "learning_rate": 3.2760409568952766e-05,
+ "loss": 1.3685,
+ "step": 2202
+ },
+ {
+ "epoch": 2.2036197680597667,
+ "grad_norm": 0.688566267490387,
+ "learning_rate": 3.268285624173347e-05,
+ "loss": 1.3916,
+ "step": 2203
+ },
+ {
+ "epoch": 2.2046200493888906,
+ "grad_norm": 0.569244921207428,
+ "learning_rate": 3.260537688388086e-05,
+ "loss": 1.1975,
+ "step": 2204
+ },
+ {
+ "epoch": 2.2056203307180144,
+ "grad_norm": 0.6800128817558289,
+ "learning_rate": 3.252797158053077e-05,
+ "loss": 1.4201,
+ "step": 2205
+ },
+ {
+ "epoch": 2.2066206120471383,
+ "grad_norm": 0.7519281506538391,
+ "learning_rate": 3.24506404167376e-05,
+ "loss": 1.2746,
+ "step": 2206
+ },
+ {
+ "epoch": 2.207620893376262,
+ "grad_norm": 0.6840569972991943,
+ "learning_rate": 3.2373383477474354e-05,
+ "loss": 1.1698,
+ "step": 2207
+ },
+ {
+ "epoch": 2.208621174705386,
+ "grad_norm": 0.6477528810501099,
+ "learning_rate": 3.229620084763237e-05,
+ "loss": 1.2197,
+ "step": 2208
+ },
+ {
+ "epoch": 2.2096214560345095,
+ "grad_norm": 0.6310058832168579,
+ "learning_rate": 3.221909261202146e-05,
+ "loss": 1.3451,
+ "step": 2209
+ },
+ {
+ "epoch": 2.2106217373636334,
+ "grad_norm": 0.6499499678611755,
+ "learning_rate": 3.214205885536965e-05,
+ "loss": 1.344,
+ "step": 2210
+ },
+ {
+ "epoch": 2.2116220186927573,
+ "grad_norm": 0.6838659644126892,
+ "learning_rate": 3.2065099662323017e-05,
+ "loss": 1.1809,
+ "step": 2211
+ },
+ {
+ "epoch": 2.212622300021881,
+ "grad_norm": 0.5906123518943787,
+ "learning_rate": 3.1988215117445896e-05,
+ "loss": 1.099,
+ "step": 2212
+ },
+ {
+ "epoch": 2.213622581351005,
+ "grad_norm": 0.6088157892227173,
+ "learning_rate": 3.191140530522041e-05,
+ "loss": 1.26,
+ "step": 2213
+ },
+ {
+ "epoch": 2.214622862680129,
+ "grad_norm": 0.6145564317703247,
+ "learning_rate": 3.1834670310046734e-05,
+ "loss": 1.3161,
+ "step": 2214
+ },
+ {
+ "epoch": 2.2156231440092524,
+ "grad_norm": 0.6671096682548523,
+ "learning_rate": 3.1758010216242664e-05,
+ "loss": 1.2084,
+ "step": 2215
+ },
+ {
+ "epoch": 2.2166234253383763,
+ "grad_norm": 0.590133547782898,
+ "learning_rate": 3.168142510804386e-05,
+ "loss": 1.2268,
+ "step": 2216
+ },
+ {
+ "epoch": 2.2176237066675,
+ "grad_norm": 0.6781971454620361,
+ "learning_rate": 3.1604915069603436e-05,
+ "loss": 1.4609,
+ "step": 2217
+ },
+ {
+ "epoch": 2.218623987996624,
+ "grad_norm": 0.6087194085121155,
+ "learning_rate": 3.152848018499215e-05,
+ "loss": 1.2203,
+ "step": 2218
+ },
+ {
+ "epoch": 2.219624269325748,
+ "grad_norm": 0.8006445169448853,
+ "learning_rate": 3.145212053819806e-05,
+ "loss": 1.2055,
+ "step": 2219
+ },
+ {
+ "epoch": 2.220624550654872,
+ "grad_norm": 0.7006067633628845,
+ "learning_rate": 3.137583621312665e-05,
+ "loss": 1.4365,
+ "step": 2220
+ },
+ {
+ "epoch": 2.2216248319839953,
+ "grad_norm": 0.5759941339492798,
+ "learning_rate": 3.1299627293600595e-05,
+ "loss": 1.2277,
+ "step": 2221
+ },
+ {
+ "epoch": 2.222625113313119,
+ "grad_norm": 0.765470564365387,
+ "learning_rate": 3.122349386335964e-05,
+ "loss": 1.3485,
+ "step": 2222
+ },
+ {
+ "epoch": 2.223625394642243,
+ "grad_norm": 0.6199076175689697,
+ "learning_rate": 3.114743600606078e-05,
+ "loss": 1.0602,
+ "step": 2223
+ },
+ {
+ "epoch": 2.224625675971367,
+ "grad_norm": 0.6438413858413696,
+ "learning_rate": 3.107145380527776e-05,
+ "loss": 1.3,
+ "step": 2224
+ },
+ {
+ "epoch": 2.225625957300491,
+ "grad_norm": 0.6275787949562073,
+ "learning_rate": 3.099554734450133e-05,
+ "loss": 1.1412,
+ "step": 2225
+ },
+ {
+ "epoch": 2.2266262386296147,
+ "grad_norm": 0.592475950717926,
+ "learning_rate": 3.091971670713889e-05,
+ "loss": 1.3188,
+ "step": 2226
+ },
+ {
+ "epoch": 2.227626519958738,
+ "grad_norm": 0.6204854249954224,
+ "learning_rate": 3.084396197651468e-05,
+ "loss": 1.3298,
+ "step": 2227
+ },
+ {
+ "epoch": 2.228626801287862,
+ "grad_norm": 0.6952534914016724,
+ "learning_rate": 3.076828323586941e-05,
+ "loss": 1.4366,
+ "step": 2228
+ },
+ {
+ "epoch": 2.229627082616986,
+ "grad_norm": 0.7046433091163635,
+ "learning_rate": 3.06926805683603e-05,
+ "loss": 1.2676,
+ "step": 2229
+ },
+ {
+ "epoch": 2.23062736394611,
+ "grad_norm": 0.7284234166145325,
+ "learning_rate": 3.061715405706106e-05,
+ "loss": 1.4308,
+ "step": 2230
+ },
+ {
+ "epoch": 2.2316276452752337,
+ "grad_norm": 0.7029892206192017,
+ "learning_rate": 3.0541703784961615e-05,
+ "loss": 1.2041,
+ "step": 2231
+ },
+ {
+ "epoch": 2.2326279266043576,
+ "grad_norm": 0.5847492218017578,
+ "learning_rate": 3.0466329834968233e-05,
+ "loss": 1.3313,
+ "step": 2232
+ },
+ {
+ "epoch": 2.2336282079334815,
+ "grad_norm": 0.6810020208358765,
+ "learning_rate": 3.0391032289903188e-05,
+ "loss": 1.3585,
+ "step": 2233
+ },
+ {
+ "epoch": 2.234628489262605,
+ "grad_norm": 0.5531882643699646,
+ "learning_rate": 3.0315811232504922e-05,
+ "loss": 1.1797,
+ "step": 2234
+ },
+ {
+ "epoch": 2.235628770591729,
+ "grad_norm": 0.6164661645889282,
+ "learning_rate": 3.0240666745427713e-05,
+ "loss": 1.049,
+ "step": 2235
+ },
+ {
+ "epoch": 2.2366290519208527,
+ "grad_norm": 0.5976455807685852,
+ "learning_rate": 3.0165598911241832e-05,
+ "loss": 1.0369,
+ "step": 2236
+ },
+ {
+ "epoch": 2.2376293332499766,
+ "grad_norm": 0.7219393849372864,
+ "learning_rate": 3.009060781243319e-05,
+ "loss": 1.3881,
+ "step": 2237
+ },
+ {
+ "epoch": 2.2386296145791005,
+ "grad_norm": 0.6040582656860352,
+ "learning_rate": 3.0015693531403465e-05,
+ "loss": 1.1733,
+ "step": 2238
+ },
+ {
+ "epoch": 2.2396298959082244,
+ "grad_norm": 0.7944032549858093,
+ "learning_rate": 2.994085615046993e-05,
+ "loss": 1.3881,
+ "step": 2239
+ },
+ {
+ "epoch": 2.240630177237348,
+ "grad_norm": 0.7312778234481812,
+ "learning_rate": 2.9866095751865297e-05,
+ "loss": 1.4463,
+ "step": 2240
+ },
+ {
+ "epoch": 2.2416304585664717,
+ "grad_norm": 0.7543103098869324,
+ "learning_rate": 2.979141241773775e-05,
+ "loss": 1.3234,
+ "step": 2241
+ },
+ {
+ "epoch": 2.2426307398955956,
+ "grad_norm": 0.6693204045295715,
+ "learning_rate": 2.971680623015074e-05,
+ "loss": 1.2764,
+ "step": 2242
+ },
+ {
+ "epoch": 2.2436310212247195,
+ "grad_norm": 0.5705234408378601,
+ "learning_rate": 2.9642277271083008e-05,
+ "loss": 1.2161,
+ "step": 2243
+ },
+ {
+ "epoch": 2.2446313025538434,
+ "grad_norm": 0.6513181328773499,
+ "learning_rate": 2.9567825622428358e-05,
+ "loss": 1.3429,
+ "step": 2244
+ },
+ {
+ "epoch": 2.2456315838829672,
+ "grad_norm": 0.684223473072052,
+ "learning_rate": 2.9493451365995737e-05,
+ "loss": 1.2892,
+ "step": 2245
+ },
+ {
+ "epoch": 2.246631865212091,
+ "grad_norm": 0.6515152454376221,
+ "learning_rate": 2.9419154583508978e-05,
+ "loss": 1.3391,
+ "step": 2246
+ },
+ {
+ "epoch": 2.2476321465412146,
+ "grad_norm": 0.7026727199554443,
+ "learning_rate": 2.9344935356606773e-05,
+ "loss": 1.191,
+ "step": 2247
+ },
+ {
+ "epoch": 2.2486324278703385,
+ "grad_norm": 0.6431560516357422,
+ "learning_rate": 2.9270793766842697e-05,
+ "loss": 1.1315,
+ "step": 2248
+ },
+ {
+ "epoch": 2.2496327091994623,
+ "grad_norm": 0.6432946920394897,
+ "learning_rate": 2.9196729895684884e-05,
+ "loss": 1.2303,
+ "step": 2249
+ },
+ {
+ "epoch": 2.2506329905285862,
+ "grad_norm": 0.5639371871948242,
+ "learning_rate": 2.9122743824516195e-05,
+ "loss": 1.2342,
+ "step": 2250
+ },
+ {
+ "epoch": 2.25163327185771,
+ "grad_norm": 0.5914719700813293,
+ "learning_rate": 2.9048835634633887e-05,
+ "loss": 1.1854,
+ "step": 2251
+ },
+ {
+ "epoch": 2.2526335531868336,
+ "grad_norm": 0.6893708109855652,
+ "learning_rate": 2.897500540724972e-05,
+ "loss": 1.3816,
+ "step": 2252
+ },
+ {
+ "epoch": 2.2536338345159574,
+ "grad_norm": 0.5631057620048523,
+ "learning_rate": 2.8901253223489754e-05,
+ "loss": 1.1757,
+ "step": 2253
+ },
+ {
+ "epoch": 2.2546341158450813,
+ "grad_norm": 0.7970195412635803,
+ "learning_rate": 2.8827579164394347e-05,
+ "loss": 1.4732,
+ "step": 2254
+ },
+ {
+ "epoch": 2.255634397174205,
+ "grad_norm": 0.6335131525993347,
+ "learning_rate": 2.875398331091792e-05,
+ "loss": 1.2495,
+ "step": 2255
+ },
+ {
+ "epoch": 2.256634678503329,
+ "grad_norm": 0.7421322464942932,
+ "learning_rate": 2.8680465743928985e-05,
+ "loss": 1.3954,
+ "step": 2256
+ },
+ {
+ "epoch": 2.257634959832453,
+ "grad_norm": 0.6872101426124573,
+ "learning_rate": 2.8607026544210114e-05,
+ "loss": 1.2237,
+ "step": 2257
+ },
+ {
+ "epoch": 2.258635241161577,
+ "grad_norm": 0.7055836915969849,
+ "learning_rate": 2.8533665792457644e-05,
+ "loss": 1.3697,
+ "step": 2258
+ },
+ {
+ "epoch": 2.2596355224907003,
+ "grad_norm": 0.6329922080039978,
+ "learning_rate": 2.8460383569281824e-05,
+ "loss": 1.1614,
+ "step": 2259
+ },
+ {
+ "epoch": 2.260635803819824,
+ "grad_norm": 0.7038082480430603,
+ "learning_rate": 2.8387179955206523e-05,
+ "loss": 1.3674,
+ "step": 2260
+ },
+ {
+ "epoch": 2.261636085148948,
+ "grad_norm": 0.6976034045219421,
+ "learning_rate": 2.831405503066932e-05,
+ "loss": 1.4164,
+ "step": 2261
+ },
+ {
+ "epoch": 2.262636366478072,
+ "grad_norm": 0.6086018681526184,
+ "learning_rate": 2.8241008876021215e-05,
+ "loss": 1.1655,
+ "step": 2262
+ },
+ {
+ "epoch": 2.263636647807196,
+ "grad_norm": 0.6900399327278137,
+ "learning_rate": 2.8168041571526805e-05,
+ "loss": 1.3057,
+ "step": 2263
+ },
+ {
+ "epoch": 2.2646369291363198,
+ "grad_norm": 0.6113269925117493,
+ "learning_rate": 2.8095153197363887e-05,
+ "loss": 1.2436,
+ "step": 2264
+ },
+ {
+ "epoch": 2.265637210465443,
+ "grad_norm": 0.5578724145889282,
+ "learning_rate": 2.8022343833623666e-05,
+ "loss": 1.1389,
+ "step": 2265
+ },
+ {
+ "epoch": 2.266637491794567,
+ "grad_norm": 0.6085034608840942,
+ "learning_rate": 2.7949613560310438e-05,
+ "loss": 1.0843,
+ "step": 2266
+ },
+ {
+ "epoch": 2.267637773123691,
+ "grad_norm": 0.7718783020973206,
+ "learning_rate": 2.787696245734155e-05,
+ "loss": 1.3021,
+ "step": 2267
+ },
+ {
+ "epoch": 2.268638054452815,
+ "grad_norm": 0.7130104303359985,
+ "learning_rate": 2.7804390604547557e-05,
+ "loss": 1.3628,
+ "step": 2268
+ },
+ {
+ "epoch": 2.2696383357819387,
+ "grad_norm": 0.622186005115509,
+ "learning_rate": 2.7731898081671702e-05,
+ "loss": 1.2327,
+ "step": 2269
+ },
+ {
+ "epoch": 2.2706386171110626,
+ "grad_norm": 0.5734781622886658,
+ "learning_rate": 2.765948496837022e-05,
+ "loss": 1.3696,
+ "step": 2270
+ },
+ {
+ "epoch": 2.2716388984401865,
+ "grad_norm": 0.7205266952514648,
+ "learning_rate": 2.758715134421197e-05,
+ "loss": 1.3049,
+ "step": 2271
+ },
+ {
+ "epoch": 2.27263917976931,
+ "grad_norm": 0.7254026532173157,
+ "learning_rate": 2.7514897288678578e-05,
+ "loss": 1.2403,
+ "step": 2272
+ },
+ {
+ "epoch": 2.273639461098434,
+ "grad_norm": 0.6365311741828918,
+ "learning_rate": 2.744272288116416e-05,
+ "loss": 1.2462,
+ "step": 2273
+ },
+ {
+ "epoch": 2.2746397424275577,
+ "grad_norm": 0.6495359539985657,
+ "learning_rate": 2.7370628200975302e-05,
+ "loss": 1.3269,
+ "step": 2274
+ },
+ {
+ "epoch": 2.2756400237566816,
+ "grad_norm": 0.6046035289764404,
+ "learning_rate": 2.729861332733108e-05,
+ "loss": 1.0957,
+ "step": 2275
+ },
+ {
+ "epoch": 2.2766403050858055,
+ "grad_norm": 0.6337087750434875,
+ "learning_rate": 2.7226678339362755e-05,
+ "loss": 1.1258,
+ "step": 2276
+ },
+ {
+ "epoch": 2.2776405864149294,
+ "grad_norm": 0.6914265751838684,
+ "learning_rate": 2.7154823316113932e-05,
+ "loss": 1.1727,
+ "step": 2277
+ },
+ {
+ "epoch": 2.278640867744053,
+ "grad_norm": 0.5778425335884094,
+ "learning_rate": 2.708304833654023e-05,
+ "loss": 1.2652,
+ "step": 2278
+ },
+ {
+ "epoch": 2.2796411490731767,
+ "grad_norm": 0.6359294056892395,
+ "learning_rate": 2.7011353479509426e-05,
+ "loss": 1.1769,
+ "step": 2279
+ },
+ {
+ "epoch": 2.2806414304023006,
+ "grad_norm": 0.672590434551239,
+ "learning_rate": 2.693973882380114e-05,
+ "loss": 1.5127,
+ "step": 2280
+ },
+ {
+ "epoch": 2.2816417117314245,
+ "grad_norm": 0.6255220770835876,
+ "learning_rate": 2.686820444810696e-05,
+ "loss": 1.0315,
+ "step": 2281
+ },
+ {
+ "epoch": 2.2826419930605484,
+ "grad_norm": 0.6508489847183228,
+ "learning_rate": 2.679675043103026e-05,
+ "loss": 1.3174,
+ "step": 2282
+ },
+ {
+ "epoch": 2.2836422743896723,
+ "grad_norm": 0.6855418086051941,
+ "learning_rate": 2.6725376851086025e-05,
+ "loss": 1.4684,
+ "step": 2283
+ },
+ {
+ "epoch": 2.284642555718796,
+ "grad_norm": 0.5959590673446655,
+ "learning_rate": 2.6654083786700955e-05,
+ "loss": 1.081,
+ "step": 2284
+ },
+ {
+ "epoch": 2.2856428370479196,
+ "grad_norm": 0.6854454874992371,
+ "learning_rate": 2.6582871316213198e-05,
+ "loss": 1.3547,
+ "step": 2285
+ },
+ {
+ "epoch": 2.2866431183770435,
+ "grad_norm": 0.6475023031234741,
+ "learning_rate": 2.6511739517872426e-05,
+ "loss": 1.1388,
+ "step": 2286
+ },
+ {
+ "epoch": 2.2876433997061674,
+ "grad_norm": 0.6125039458274841,
+ "learning_rate": 2.644068846983956e-05,
+ "loss": 1.0777,
+ "step": 2287
+ },
+ {
+ "epoch": 2.2886436810352913,
+ "grad_norm": 0.641226589679718,
+ "learning_rate": 2.6369718250186914e-05,
+ "loss": 1.3094,
+ "step": 2288
+ },
+ {
+ "epoch": 2.289643962364415,
+ "grad_norm": 0.6123669743537903,
+ "learning_rate": 2.6298828936897867e-05,
+ "loss": 1.0834,
+ "step": 2289
+ },
+ {
+ "epoch": 2.2906442436935386,
+ "grad_norm": 0.7510077357292175,
+ "learning_rate": 2.622802060786702e-05,
+ "loss": 1.3612,
+ "step": 2290
+ },
+ {
+ "epoch": 2.2916445250226625,
+ "grad_norm": 0.6886245012283325,
+ "learning_rate": 2.6157293340899857e-05,
+ "loss": 1.2483,
+ "step": 2291
+ },
+ {
+ "epoch": 2.2926448063517864,
+ "grad_norm": 0.5994114279747009,
+ "learning_rate": 2.60866472137129e-05,
+ "loss": 1.2326,
+ "step": 2292
+ },
+ {
+ "epoch": 2.2936450876809102,
+ "grad_norm": 0.6197491884231567,
+ "learning_rate": 2.6016082303933454e-05,
+ "loss": 1.1057,
+ "step": 2293
+ },
+ {
+ "epoch": 2.294645369010034,
+ "grad_norm": 0.7268143892288208,
+ "learning_rate": 2.594559868909956e-05,
+ "loss": 1.4143,
+ "step": 2294
+ },
+ {
+ "epoch": 2.295645650339158,
+ "grad_norm": 0.6396143436431885,
+ "learning_rate": 2.587519644666001e-05,
+ "loss": 1.1143,
+ "step": 2295
+ },
+ {
+ "epoch": 2.296645931668282,
+ "grad_norm": 0.6493250727653503,
+ "learning_rate": 2.580487565397406e-05,
+ "loss": 1.2665,
+ "step": 2296
+ },
+ {
+ "epoch": 2.2976462129974053,
+ "grad_norm": 0.6070438027381897,
+ "learning_rate": 2.573463638831166e-05,
+ "loss": 1.1437,
+ "step": 2297
+ },
+ {
+ "epoch": 2.2986464943265292,
+ "grad_norm": 0.6047723889350891,
+ "learning_rate": 2.566447872685298e-05,
+ "loss": 1.2397,
+ "step": 2298
+ },
+ {
+ "epoch": 2.299646775655653,
+ "grad_norm": 0.6956603527069092,
+ "learning_rate": 2.559440274668864e-05,
+ "loss": 1.4726,
+ "step": 2299
+ },
+ {
+ "epoch": 2.300647056984777,
+ "grad_norm": 0.724605143070221,
+ "learning_rate": 2.5524408524819453e-05,
+ "loss": 1.2286,
+ "step": 2300
+ },
+ {
+ "epoch": 2.301647338313901,
+ "grad_norm": 0.5969868898391724,
+ "learning_rate": 2.545449613815639e-05,
+ "loss": 1.1894,
+ "step": 2301
+ },
+ {
+ "epoch": 2.302647619643025,
+ "grad_norm": 0.5893451571464539,
+ "learning_rate": 2.5384665663520558e-05,
+ "loss": 1.3669,
+ "step": 2302
+ },
+ {
+ "epoch": 2.303647900972148,
+ "grad_norm": 0.6300607323646545,
+ "learning_rate": 2.5314917177642972e-05,
+ "loss": 1.4301,
+ "step": 2303
+ },
+ {
+ "epoch": 2.304648182301272,
+ "grad_norm": 0.5928812026977539,
+ "learning_rate": 2.5245250757164663e-05,
+ "loss": 1.5703,
+ "step": 2304
+ },
+ {
+ "epoch": 2.305648463630396,
+ "grad_norm": 0.5831782221794128,
+ "learning_rate": 2.5175666478636374e-05,
+ "loss": 1.3643,
+ "step": 2305
+ },
+ {
+ "epoch": 2.30664874495952,
+ "grad_norm": 0.7637529373168945,
+ "learning_rate": 2.5106164418518686e-05,
+ "loss": 1.4751,
+ "step": 2306
+ },
+ {
+ "epoch": 2.3076490262886438,
+ "grad_norm": 0.6256185173988342,
+ "learning_rate": 2.5036744653181753e-05,
+ "loss": 1.4159,
+ "step": 2307
+ },
+ {
+ "epoch": 2.3086493076177677,
+ "grad_norm": 0.7415730953216553,
+ "learning_rate": 2.4967407258905385e-05,
+ "loss": 1.2233,
+ "step": 2308
+ },
+ {
+ "epoch": 2.3096495889468915,
+ "grad_norm": 0.639265239238739,
+ "learning_rate": 2.48981523118788e-05,
+ "loss": 1.3055,
+ "step": 2309
+ },
+ {
+ "epoch": 2.310649870276015,
+ "grad_norm": 0.6294757127761841,
+ "learning_rate": 2.4828979888200698e-05,
+ "loss": 1.4187,
+ "step": 2310
+ },
+ {
+ "epoch": 2.311650151605139,
+ "grad_norm": 0.6178464293479919,
+ "learning_rate": 2.475989006387901e-05,
+ "loss": 1.3641,
+ "step": 2311
+ },
+ {
+ "epoch": 2.3126504329342628,
+ "grad_norm": 0.593035876750946,
+ "learning_rate": 2.4690882914831004e-05,
+ "loss": 1.248,
+ "step": 2312
+ },
+ {
+ "epoch": 2.3136507142633866,
+ "grad_norm": 0.6418329477310181,
+ "learning_rate": 2.462195851688306e-05,
+ "loss": 1.2338,
+ "step": 2313
+ },
+ {
+ "epoch": 2.3146509955925105,
+ "grad_norm": 0.579680323600769,
+ "learning_rate": 2.4553116945770583e-05,
+ "loss": 1.2199,
+ "step": 2314
+ },
+ {
+ "epoch": 2.315651276921634,
+ "grad_norm": 0.7126612663269043,
+ "learning_rate": 2.4484358277138065e-05,
+ "loss": 1.3096,
+ "step": 2315
+ },
+ {
+ "epoch": 2.316651558250758,
+ "grad_norm": 0.7200229167938232,
+ "learning_rate": 2.441568258653879e-05,
+ "loss": 1.3489,
+ "step": 2316
+ },
+ {
+ "epoch": 2.3176518395798817,
+ "grad_norm": 0.654485821723938,
+ "learning_rate": 2.4347089949434988e-05,
+ "loss": 1.2585,
+ "step": 2317
+ },
+ {
+ "epoch": 2.3186521209090056,
+ "grad_norm": 0.6139536499977112,
+ "learning_rate": 2.4278580441197484e-05,
+ "loss": 1.3096,
+ "step": 2318
+ },
+ {
+ "epoch": 2.3196524022381295,
+ "grad_norm": 0.6222895383834839,
+ "learning_rate": 2.421015413710591e-05,
+ "loss": 1.2969,
+ "step": 2319
+ },
+ {
+ "epoch": 2.3206526835672534,
+ "grad_norm": 0.73511803150177,
+ "learning_rate": 2.4141811112348377e-05,
+ "loss": 1.3704,
+ "step": 2320
+ },
+ {
+ "epoch": 2.3216529648963773,
+ "grad_norm": 0.5469374060630798,
+ "learning_rate": 2.407355144202147e-05,
+ "loss": 1.267,
+ "step": 2321
+ },
+ {
+ "epoch": 2.3226532462255007,
+ "grad_norm": 0.7390980124473572,
+ "learning_rate": 2.4005375201130274e-05,
+ "loss": 1.1809,
+ "step": 2322
+ },
+ {
+ "epoch": 2.3236535275546246,
+ "grad_norm": 0.6703569889068604,
+ "learning_rate": 2.3937282464588108e-05,
+ "loss": 1.4966,
+ "step": 2323
+ },
+ {
+ "epoch": 2.3246538088837485,
+ "grad_norm": 0.6065950989723206,
+ "learning_rate": 2.3869273307216612e-05,
+ "loss": 1.3386,
+ "step": 2324
+ },
+ {
+ "epoch": 2.3256540902128724,
+ "grad_norm": 0.6592161655426025,
+ "learning_rate": 2.3801347803745512e-05,
+ "loss": 1.4153,
+ "step": 2325
+ },
+ {
+ "epoch": 2.3266543715419963,
+ "grad_norm": 0.7319929003715515,
+ "learning_rate": 2.3733506028812658e-05,
+ "loss": 1.3225,
+ "step": 2326
+ },
+ {
+ "epoch": 2.32765465287112,
+ "grad_norm": 0.8160943984985352,
+ "learning_rate": 2.3665748056963956e-05,
+ "loss": 1.2414,
+ "step": 2327
+ },
+ {
+ "epoch": 2.3286549342002436,
+ "grad_norm": 0.7314315438270569,
+ "learning_rate": 2.3598073962653066e-05,
+ "loss": 1.3169,
+ "step": 2328
+ },
+ {
+ "epoch": 2.3296552155293675,
+ "grad_norm": 0.5869148969650269,
+ "learning_rate": 2.3530483820241656e-05,
+ "loss": 1.1076,
+ "step": 2329
+ },
+ {
+ "epoch": 2.3306554968584914,
+ "grad_norm": 0.6157956123352051,
+ "learning_rate": 2.3462977703999023e-05,
+ "loss": 1.3961,
+ "step": 2330
+ },
+ {
+ "epoch": 2.3316557781876153,
+ "grad_norm": 0.6022456288337708,
+ "learning_rate": 2.339555568810221e-05,
+ "loss": 1.0627,
+ "step": 2331
+ },
+ {
+ "epoch": 2.332656059516739,
+ "grad_norm": 0.5729643702507019,
+ "learning_rate": 2.332821784663578e-05,
+ "loss": 1.0171,
+ "step": 2332
+ },
+ {
+ "epoch": 2.333656340845863,
+ "grad_norm": 0.6669082641601562,
+ "learning_rate": 2.3260964253591898e-05,
+ "loss": 1.3516,
+ "step": 2333
+ },
+ {
+ "epoch": 2.334656622174987,
+ "grad_norm": 0.6814001798629761,
+ "learning_rate": 2.3193794982870044e-05,
+ "loss": 1.181,
+ "step": 2334
+ },
+ {
+ "epoch": 2.3356569035041104,
+ "grad_norm": 0.6704903841018677,
+ "learning_rate": 2.312671010827715e-05,
+ "loss": 1.2422,
+ "step": 2335
+ },
+ {
+ "epoch": 2.3366571848332343,
+ "grad_norm": 0.5573928952217102,
+ "learning_rate": 2.30597097035273e-05,
+ "loss": 1.2298,
+ "step": 2336
+ },
+ {
+ "epoch": 2.337657466162358,
+ "grad_norm": 0.6623947024345398,
+ "learning_rate": 2.29927938422419e-05,
+ "loss": 1.181,
+ "step": 2337
+ },
+ {
+ "epoch": 2.338657747491482,
+ "grad_norm": 0.6784799695014954,
+ "learning_rate": 2.2925962597949302e-05,
+ "loss": 1.4118,
+ "step": 2338
+ },
+ {
+ "epoch": 2.339658028820606,
+ "grad_norm": 0.6137337684631348,
+ "learning_rate": 2.285921604408502e-05,
+ "loss": 1.2455,
+ "step": 2339
+ },
+ {
+ "epoch": 2.34065831014973,
+ "grad_norm": 0.5788987278938293,
+ "learning_rate": 2.2792554253991415e-05,
+ "loss": 1.1512,
+ "step": 2340
+ },
+ {
+ "epoch": 2.3416585914788532,
+ "grad_norm": 0.6471617817878723,
+ "learning_rate": 2.272597730091769e-05,
+ "loss": 1.1491,
+ "step": 2341
+ },
+ {
+ "epoch": 2.342658872807977,
+ "grad_norm": 0.7257328629493713,
+ "learning_rate": 2.2659485258019976e-05,
+ "loss": 1.2717,
+ "step": 2342
+ },
+ {
+ "epoch": 2.343659154137101,
+ "grad_norm": 0.678607702255249,
+ "learning_rate": 2.259307819836093e-05,
+ "loss": 1.3263,
+ "step": 2343
+ },
+ {
+ "epoch": 2.344659435466225,
+ "grad_norm": 0.6241843104362488,
+ "learning_rate": 2.252675619490996e-05,
+ "loss": 1.3422,
+ "step": 2344
+ },
+ {
+ "epoch": 2.345659716795349,
+ "grad_norm": 0.6813640594482422,
+ "learning_rate": 2.2460519320542883e-05,
+ "loss": 1.2869,
+ "step": 2345
+ },
+ {
+ "epoch": 2.3466599981244727,
+ "grad_norm": 0.6423537731170654,
+ "learning_rate": 2.2394367648042102e-05,
+ "loss": 1.2624,
+ "step": 2346
+ },
+ {
+ "epoch": 2.3476602794535966,
+ "grad_norm": 0.6376233100891113,
+ "learning_rate": 2.2328301250096327e-05,
+ "loss": 1.0929,
+ "step": 2347
+ },
+ {
+ "epoch": 2.34866056078272,
+ "grad_norm": 0.6261239647865295,
+ "learning_rate": 2.2262320199300557e-05,
+ "loss": 1.1157,
+ "step": 2348
+ },
+ {
+ "epoch": 2.349660842111844,
+ "grad_norm": 0.6539388298988342,
+ "learning_rate": 2.2196424568156073e-05,
+ "loss": 1.2641,
+ "step": 2349
+ },
+ {
+ "epoch": 2.350661123440968,
+ "grad_norm": 0.6082950234413147,
+ "learning_rate": 2.2130614429070207e-05,
+ "loss": 1.269,
+ "step": 2350
+ },
+ {
+ "epoch": 2.3516614047700917,
+ "grad_norm": 0.6362713575363159,
+ "learning_rate": 2.206488985435645e-05,
+ "loss": 1.1818,
+ "step": 2351
+ },
+ {
+ "epoch": 2.3526616860992156,
+ "grad_norm": 0.6356920599937439,
+ "learning_rate": 2.199925091623418e-05,
+ "loss": 1.3691,
+ "step": 2352
+ },
+ {
+ "epoch": 2.353661967428339,
+ "grad_norm": 0.7058550715446472,
+ "learning_rate": 2.193369768682877e-05,
+ "loss": 1.2488,
+ "step": 2353
+ },
+ {
+ "epoch": 2.354662248757463,
+ "grad_norm": 0.6002040505409241,
+ "learning_rate": 2.1868230238171293e-05,
+ "loss": 1.2003,
+ "step": 2354
+ },
+ {
+ "epoch": 2.3556625300865868,
+ "grad_norm": 0.5998533368110657,
+ "learning_rate": 2.1802848642198692e-05,
+ "loss": 1.317,
+ "step": 2355
+ },
+ {
+ "epoch": 2.3566628114157107,
+ "grad_norm": 0.7087720036506653,
+ "learning_rate": 2.1737552970753526e-05,
+ "loss": 1.3822,
+ "step": 2356
+ },
+ {
+ "epoch": 2.3576630927448345,
+ "grad_norm": 0.6583616733551025,
+ "learning_rate": 2.1672343295583873e-05,
+ "loss": 1.2539,
+ "step": 2357
+ },
+ {
+ "epoch": 2.3586633740739584,
+ "grad_norm": 0.7159150242805481,
+ "learning_rate": 2.160721968834344e-05,
+ "loss": 1.3049,
+ "step": 2358
+ },
+ {
+ "epoch": 2.3596636554030823,
+ "grad_norm": 0.7452221512794495,
+ "learning_rate": 2.154218222059122e-05,
+ "loss": 1.2626,
+ "step": 2359
+ },
+ {
+ "epoch": 2.3606639367322058,
+ "grad_norm": 0.6934370398521423,
+ "learning_rate": 2.1477230963791706e-05,
+ "loss": 1.2278,
+ "step": 2360
+ },
+ {
+ "epoch": 2.3616642180613296,
+ "grad_norm": 0.6518145799636841,
+ "learning_rate": 2.141236598931451e-05,
+ "loss": 1.3014,
+ "step": 2361
+ },
+ {
+ "epoch": 2.3626644993904535,
+ "grad_norm": 0.712419867515564,
+ "learning_rate": 2.1347587368434575e-05,
+ "loss": 1.2546,
+ "step": 2362
+ },
+ {
+ "epoch": 2.3636647807195774,
+ "grad_norm": 0.6252943873405457,
+ "learning_rate": 2.1282895172331817e-05,
+ "loss": 1.3629,
+ "step": 2363
+ },
+ {
+ "epoch": 2.3646650620487013,
+ "grad_norm": 0.7917761206626892,
+ "learning_rate": 2.1218289472091336e-05,
+ "loss": 1.0265,
+ "step": 2364
+ },
+ {
+ "epoch": 2.365665343377825,
+ "grad_norm": 0.6771757006645203,
+ "learning_rate": 2.115377033870305e-05,
+ "loss": 1.141,
+ "step": 2365
+ },
+ {
+ "epoch": 2.3666656247069486,
+ "grad_norm": 0.642998993396759,
+ "learning_rate": 2.1089337843061863e-05,
+ "loss": 1.4206,
+ "step": 2366
+ },
+ {
+ "epoch": 2.3676659060360725,
+ "grad_norm": 0.5679053664207458,
+ "learning_rate": 2.102499205596743e-05,
+ "loss": 1.2185,
+ "step": 2367
+ },
+ {
+ "epoch": 2.3686661873651964,
+ "grad_norm": 0.5789163708686829,
+ "learning_rate": 2.0960733048124083e-05,
+ "loss": 1.0651,
+ "step": 2368
+ },
+ {
+ "epoch": 2.3696664686943203,
+ "grad_norm": 0.621918261051178,
+ "learning_rate": 2.0896560890140913e-05,
+ "loss": 1.3253,
+ "step": 2369
+ },
+ {
+ "epoch": 2.370666750023444,
+ "grad_norm": 0.6738060712814331,
+ "learning_rate": 2.0832475652531447e-05,
+ "loss": 1.2178,
+ "step": 2370
+ },
+ {
+ "epoch": 2.371667031352568,
+ "grad_norm": 0.6864463686943054,
+ "learning_rate": 2.076847740571387e-05,
+ "loss": 1.0683,
+ "step": 2371
+ },
+ {
+ "epoch": 2.372667312681692,
+ "grad_norm": 0.683365523815155,
+ "learning_rate": 2.070456622001059e-05,
+ "loss": 1.2512,
+ "step": 2372
+ },
+ {
+ "epoch": 2.3736675940108154,
+ "grad_norm": 0.7128761410713196,
+ "learning_rate": 2.064074216564852e-05,
+ "loss": 1.3049,
+ "step": 2373
+ },
+ {
+ "epoch": 2.3746678753399393,
+ "grad_norm": 0.7912024259567261,
+ "learning_rate": 2.0577005312758703e-05,
+ "loss": 1.4466,
+ "step": 2374
+ },
+ {
+ "epoch": 2.375668156669063,
+ "grad_norm": 0.642785370349884,
+ "learning_rate": 2.0513355731376395e-05,
+ "loss": 1.349,
+ "step": 2375
+ },
+ {
+ "epoch": 2.376668437998187,
+ "grad_norm": 0.6518558859825134,
+ "learning_rate": 2.0449793491441028e-05,
+ "loss": 1.3863,
+ "step": 2376
+ },
+ {
+ "epoch": 2.377668719327311,
+ "grad_norm": 0.5721689462661743,
+ "learning_rate": 2.0386318662795957e-05,
+ "loss": 1.3933,
+ "step": 2377
+ },
+ {
+ "epoch": 2.3786690006564344,
+ "grad_norm": 0.6657408475875854,
+ "learning_rate": 2.0322931315188586e-05,
+ "loss": 1.1653,
+ "step": 2378
+ },
+ {
+ "epoch": 2.3796692819855583,
+ "grad_norm": 0.6022714972496033,
+ "learning_rate": 2.0259631518270105e-05,
+ "loss": 1.2296,
+ "step": 2379
+ },
+ {
+ "epoch": 2.380669563314682,
+ "grad_norm": 0.7442004680633545,
+ "learning_rate": 2.0196419341595595e-05,
+ "loss": 1.2062,
+ "step": 2380
+ },
+ {
+ "epoch": 2.381669844643806,
+ "grad_norm": 0.5507898926734924,
+ "learning_rate": 2.013329485462374e-05,
+ "loss": 1.1901,
+ "step": 2381
+ },
+ {
+ "epoch": 2.38267012597293,
+ "grad_norm": 0.6922757625579834,
+ "learning_rate": 2.0070258126717e-05,
+ "loss": 1.3193,
+ "step": 2382
+ },
+ {
+ "epoch": 2.383670407302054,
+ "grad_norm": 0.5862204432487488,
+ "learning_rate": 2.000730922714128e-05,
+ "loss": 1.3149,
+ "step": 2383
+ },
+ {
+ "epoch": 2.3846706886311777,
+ "grad_norm": 0.6098889708518982,
+ "learning_rate": 1.9944448225066093e-05,
+ "loss": 1.2777,
+ "step": 2384
+ },
+ {
+ "epoch": 2.385670969960301,
+ "grad_norm": 0.6173763871192932,
+ "learning_rate": 1.9881675189564254e-05,
+ "loss": 1.2043,
+ "step": 2385
+ },
+ {
+ "epoch": 2.386671251289425,
+ "grad_norm": 0.6959353089332581,
+ "learning_rate": 1.981899018961202e-05,
+ "loss": 1.0934,
+ "step": 2386
+ },
+ {
+ "epoch": 2.387671532618549,
+ "grad_norm": 0.6258510947227478,
+ "learning_rate": 1.975639329408887e-05,
+ "loss": 1.2021,
+ "step": 2387
+ },
+ {
+ "epoch": 2.388671813947673,
+ "grad_norm": 0.6541923880577087,
+ "learning_rate": 1.9693884571777432e-05,
+ "loss": 1.1109,
+ "step": 2388
+ },
+ {
+ "epoch": 2.3896720952767967,
+ "grad_norm": 0.6073365807533264,
+ "learning_rate": 1.963146409136354e-05,
+ "loss": 1.1153,
+ "step": 2389
+ },
+ {
+ "epoch": 2.3906723766059206,
+ "grad_norm": 0.5515630841255188,
+ "learning_rate": 1.9569131921435956e-05,
+ "loss": 1.1823,
+ "step": 2390
+ },
+ {
+ "epoch": 2.391672657935044,
+ "grad_norm": 0.7065924406051636,
+ "learning_rate": 1.950688813048652e-05,
+ "loss": 1.1865,
+ "step": 2391
+ },
+ {
+ "epoch": 2.392672939264168,
+ "grad_norm": 0.585737407207489,
+ "learning_rate": 1.944473278690986e-05,
+ "loss": 1.2334,
+ "step": 2392
+ },
+ {
+ "epoch": 2.393673220593292,
+ "grad_norm": 0.6631129384040833,
+ "learning_rate": 1.9382665959003477e-05,
+ "loss": 1.2752,
+ "step": 2393
+ },
+ {
+ "epoch": 2.3946735019224157,
+ "grad_norm": 0.711269199848175,
+ "learning_rate": 1.93206877149676e-05,
+ "loss": 1.1615,
+ "step": 2394
+ },
+ {
+ "epoch": 2.3956737832515396,
+ "grad_norm": 0.640571653842926,
+ "learning_rate": 1.9258798122905064e-05,
+ "loss": 1.2068,
+ "step": 2395
+ },
+ {
+ "epoch": 2.3966740645806635,
+ "grad_norm": 0.6581786274909973,
+ "learning_rate": 1.9196997250821392e-05,
+ "loss": 1.2603,
+ "step": 2396
+ },
+ {
+ "epoch": 2.3976743459097873,
+ "grad_norm": 0.7250068783760071,
+ "learning_rate": 1.913528516662452e-05,
+ "loss": 1.4225,
+ "step": 2397
+ },
+ {
+ "epoch": 2.398674627238911,
+ "grad_norm": 0.5822978615760803,
+ "learning_rate": 1.907366193812491e-05,
+ "loss": 1.0978,
+ "step": 2398
+ },
+ {
+ "epoch": 2.3996749085680347,
+ "grad_norm": 0.7264160513877869,
+ "learning_rate": 1.9012127633035305e-05,
+ "loss": 1.2794,
+ "step": 2399
+ },
+ {
+ "epoch": 2.4006751898971586,
+ "grad_norm": 0.6197434067726135,
+ "learning_rate": 1.895068231897079e-05,
+ "loss": 1.2566,
+ "step": 2400
+ },
+ {
+ "epoch": 2.4016754712262824,
+ "grad_norm": 0.6504030823707581,
+ "learning_rate": 1.8889326063448697e-05,
+ "loss": 1.058,
+ "step": 2401
+ },
+ {
+ "epoch": 2.4026757525554063,
+ "grad_norm": 0.6449025869369507,
+ "learning_rate": 1.8828058933888392e-05,
+ "loss": 1.4013,
+ "step": 2402
+ },
+ {
+ "epoch": 2.40367603388453,
+ "grad_norm": 0.5803359746932983,
+ "learning_rate": 1.8766880997611424e-05,
+ "loss": 1.3732,
+ "step": 2403
+ },
+ {
+ "epoch": 2.4046763152136537,
+ "grad_norm": 0.6420122385025024,
+ "learning_rate": 1.870579232184122e-05,
+ "loss": 1.2435,
+ "step": 2404
+ },
+ {
+ "epoch": 2.4056765965427775,
+ "grad_norm": 0.6524025797843933,
+ "learning_rate": 1.864479297370325e-05,
+ "loss": 1.0731,
+ "step": 2405
+ },
+ {
+ "epoch": 2.4066768778719014,
+ "grad_norm": 0.6313955187797546,
+ "learning_rate": 1.8583883020224724e-05,
+ "loss": 1.315,
+ "step": 2406
+ },
+ {
+ "epoch": 2.4076771592010253,
+ "grad_norm": 0.6714106202125549,
+ "learning_rate": 1.8523062528334688e-05,
+ "loss": 1.207,
+ "step": 2407
+ },
+ {
+ "epoch": 2.408677440530149,
+ "grad_norm": 0.7873541116714478,
+ "learning_rate": 1.8462331564863832e-05,
+ "loss": 1.4697,
+ "step": 2408
+ },
+ {
+ "epoch": 2.409677721859273,
+ "grad_norm": 0.6541167497634888,
+ "learning_rate": 1.8401690196544552e-05,
+ "loss": 1.4379,
+ "step": 2409
+ },
+ {
+ "epoch": 2.410678003188397,
+ "grad_norm": 0.6141494512557983,
+ "learning_rate": 1.834113849001069e-05,
+ "loss": 1.2779,
+ "step": 2410
+ },
+ {
+ "epoch": 2.4116782845175204,
+ "grad_norm": 0.7914630174636841,
+ "learning_rate": 1.8280676511797666e-05,
+ "loss": 1.3245,
+ "step": 2411
+ },
+ {
+ "epoch": 2.4126785658466443,
+ "grad_norm": 0.6244723200798035,
+ "learning_rate": 1.8220304328342252e-05,
+ "loss": 1.3419,
+ "step": 2412
+ },
+ {
+ "epoch": 2.413678847175768,
+ "grad_norm": 0.6117607951164246,
+ "learning_rate": 1.8160022005982515e-05,
+ "loss": 1.2608,
+ "step": 2413
+ },
+ {
+ "epoch": 2.414679128504892,
+ "grad_norm": 0.5759628415107727,
+ "learning_rate": 1.8099829610957863e-05,
+ "loss": 1.1133,
+ "step": 2414
+ },
+ {
+ "epoch": 2.415679409834016,
+ "grad_norm": 0.6035516858100891,
+ "learning_rate": 1.8039727209408842e-05,
+ "loss": 1.2471,
+ "step": 2415
+ },
+ {
+ "epoch": 2.4166796911631394,
+ "grad_norm": 0.6468955278396606,
+ "learning_rate": 1.7979714867377152e-05,
+ "loss": 1.219,
+ "step": 2416
+ },
+ {
+ "epoch": 2.4176799724922633,
+ "grad_norm": 0.7172923684120178,
+ "learning_rate": 1.7919792650805455e-05,
+ "loss": 1.2992,
+ "step": 2417
+ },
+ {
+ "epoch": 2.418680253821387,
+ "grad_norm": 0.5648188591003418,
+ "learning_rate": 1.7859960625537476e-05,
+ "loss": 1.3393,
+ "step": 2418
+ },
+ {
+ "epoch": 2.419680535150511,
+ "grad_norm": 0.5623717904090881,
+ "learning_rate": 1.7800218857317742e-05,
+ "loss": 1.3284,
+ "step": 2419
+ },
+ {
+ "epoch": 2.420680816479635,
+ "grad_norm": 0.5881792306900024,
+ "learning_rate": 1.774056741179171e-05,
+ "loss": 1.0625,
+ "step": 2420
+ },
+ {
+ "epoch": 2.421681097808759,
+ "grad_norm": 0.6556890606880188,
+ "learning_rate": 1.7681006354505493e-05,
+ "loss": 1.1079,
+ "step": 2421
+ },
+ {
+ "epoch": 2.4226813791378827,
+ "grad_norm": 0.7625473737716675,
+ "learning_rate": 1.7621535750905905e-05,
+ "loss": 1.3825,
+ "step": 2422
+ },
+ {
+ "epoch": 2.423681660467006,
+ "grad_norm": 0.5806693434715271,
+ "learning_rate": 1.756215566634043e-05,
+ "loss": 1.2253,
+ "step": 2423
+ },
+ {
+ "epoch": 2.42468194179613,
+ "grad_norm": 0.6054913997650146,
+ "learning_rate": 1.7502866166056986e-05,
+ "loss": 1.2402,
+ "step": 2424
+ },
+ {
+ "epoch": 2.425682223125254,
+ "grad_norm": 0.6507579684257507,
+ "learning_rate": 1.744366731520408e-05,
+ "loss": 1.5156,
+ "step": 2425
+ },
+ {
+ "epoch": 2.426682504454378,
+ "grad_norm": 0.5348602533340454,
+ "learning_rate": 1.7384559178830472e-05,
+ "loss": 1.2455,
+ "step": 2426
+ },
+ {
+ "epoch": 2.4276827857835017,
+ "grad_norm": 0.7310481071472168,
+ "learning_rate": 1.7325541821885384e-05,
+ "loss": 1.3409,
+ "step": 2427
+ },
+ {
+ "epoch": 2.4286830671126256,
+ "grad_norm": 0.6602012515068054,
+ "learning_rate": 1.726661530921815e-05,
+ "loss": 1.3951,
+ "step": 2428
+ },
+ {
+ "epoch": 2.429683348441749,
+ "grad_norm": 0.6798003911972046,
+ "learning_rate": 1.7207779705578375e-05,
+ "loss": 1.4299,
+ "step": 2429
+ },
+ {
+ "epoch": 2.430683629770873,
+ "grad_norm": 0.625441312789917,
+ "learning_rate": 1.7149035075615794e-05,
+ "loss": 1.1568,
+ "step": 2430
+ },
+ {
+ "epoch": 2.431683911099997,
+ "grad_norm": 0.5963711142539978,
+ "learning_rate": 1.709038148388007e-05,
+ "loss": 1.1604,
+ "step": 2431
+ },
+ {
+ "epoch": 2.4326841924291207,
+ "grad_norm": 0.6797822117805481,
+ "learning_rate": 1.7031818994820926e-05,
+ "loss": 1.4374,
+ "step": 2432
+ },
+ {
+ "epoch": 2.4336844737582446,
+ "grad_norm": 0.6096747517585754,
+ "learning_rate": 1.697334767278792e-05,
+ "loss": 1.2968,
+ "step": 2433
+ },
+ {
+ "epoch": 2.4346847550873685,
+ "grad_norm": 0.6281675696372986,
+ "learning_rate": 1.6914967582030493e-05,
+ "loss": 1.1493,
+ "step": 2434
+ },
+ {
+ "epoch": 2.4356850364164924,
+ "grad_norm": 0.6768115162849426,
+ "learning_rate": 1.6856678786697778e-05,
+ "loss": 1.1602,
+ "step": 2435
+ },
+ {
+ "epoch": 2.436685317745616,
+ "grad_norm": 1.0345252752304077,
+ "learning_rate": 1.6798481350838648e-05,
+ "loss": 1.3905,
+ "step": 2436
+ },
+ {
+ "epoch": 2.4376855990747397,
+ "grad_norm": 0.6273905634880066,
+ "learning_rate": 1.6740375338401526e-05,
+ "loss": 1.3156,
+ "step": 2437
+ },
+ {
+ "epoch": 2.4386858804038636,
+ "grad_norm": 0.6700469255447388,
+ "learning_rate": 1.6682360813234444e-05,
+ "loss": 1.1926,
+ "step": 2438
+ },
+ {
+ "epoch": 2.4396861617329875,
+ "grad_norm": 0.6240590214729309,
+ "learning_rate": 1.6624437839084862e-05,
+ "loss": 1.2081,
+ "step": 2439
+ },
+ {
+ "epoch": 2.4406864430621114,
+ "grad_norm": 0.661861777305603,
+ "learning_rate": 1.656660647959962e-05,
+ "loss": 1.0889,
+ "step": 2440
+ },
+ {
+ "epoch": 2.441686724391235,
+ "grad_norm": 0.6484658718109131,
+ "learning_rate": 1.6508866798324986e-05,
+ "loss": 1.2571,
+ "step": 2441
+ },
+ {
+ "epoch": 2.4426870057203587,
+ "grad_norm": 0.6831806302070618,
+ "learning_rate": 1.6451218858706374e-05,
+ "loss": 1.3593,
+ "step": 2442
+ },
+ {
+ "epoch": 2.4436872870494826,
+ "grad_norm": 0.8489123582839966,
+ "learning_rate": 1.6393662724088478e-05,
+ "loss": 1.4949,
+ "step": 2443
+ },
+ {
+ "epoch": 2.4446875683786065,
+ "grad_norm": 0.7886331081390381,
+ "learning_rate": 1.633619845771501e-05,
+ "loss": 1.1703,
+ "step": 2444
+ },
+ {
+ "epoch": 2.4456878497077303,
+ "grad_norm": 0.700215756893158,
+ "learning_rate": 1.627882612272893e-05,
+ "loss": 1.206,
+ "step": 2445
+ },
+ {
+ "epoch": 2.4466881310368542,
+ "grad_norm": 0.9291819930076599,
+ "learning_rate": 1.622154578217199e-05,
+ "loss": 1.2846,
+ "step": 2446
+ },
+ {
+ "epoch": 2.447688412365978,
+ "grad_norm": 0.5982426404953003,
+ "learning_rate": 1.6164357498984893e-05,
+ "loss": 0.9123,
+ "step": 2447
+ },
+ {
+ "epoch": 2.4486886936951016,
+ "grad_norm": 0.6755865812301636,
+ "learning_rate": 1.6107261336007285e-05,
+ "loss": 1.345,
+ "step": 2448
+ },
+ {
+ "epoch": 2.4496889750242254,
+ "grad_norm": 0.7251035571098328,
+ "learning_rate": 1.605025735597746e-05,
+ "loss": 1.2056,
+ "step": 2449
+ },
+ {
+ "epoch": 2.4506892563533493,
+ "grad_norm": 0.6493537425994873,
+ "learning_rate": 1.599334562153254e-05,
+ "loss": 1.1518,
+ "step": 2450
+ },
+ {
+ "epoch": 2.451689537682473,
+ "grad_norm": 0.5705104470252991,
+ "learning_rate": 1.593652619520819e-05,
+ "loss": 1.2496,
+ "step": 2451
+ },
+ {
+ "epoch": 2.452689819011597,
+ "grad_norm": 0.7152146100997925,
+ "learning_rate": 1.587979913943871e-05,
+ "loss": 1.1498,
+ "step": 2452
+ },
+ {
+ "epoch": 2.453690100340721,
+ "grad_norm": 0.5634121894836426,
+ "learning_rate": 1.5823164516556842e-05,
+ "loss": 1.1976,
+ "step": 2453
+ },
+ {
+ "epoch": 2.4546903816698444,
+ "grad_norm": 0.6320593953132629,
+ "learning_rate": 1.5766622388793838e-05,
+ "loss": 1.2146,
+ "step": 2454
+ },
+ {
+ "epoch": 2.4556906629989683,
+ "grad_norm": 0.5591527223587036,
+ "learning_rate": 1.5710172818279222e-05,
+ "loss": 1.1641,
+ "step": 2455
+ },
+ {
+ "epoch": 2.456690944328092,
+ "grad_norm": 0.6221880316734314,
+ "learning_rate": 1.5653815867040923e-05,
+ "loss": 1.3021,
+ "step": 2456
+ },
+ {
+ "epoch": 2.457691225657216,
+ "grad_norm": 0.6908786296844482,
+ "learning_rate": 1.5597551597004966e-05,
+ "loss": 1.2944,
+ "step": 2457
+ },
+ {
+ "epoch": 2.45869150698634,
+ "grad_norm": 0.7013647556304932,
+ "learning_rate": 1.554138006999568e-05,
+ "loss": 1.4917,
+ "step": 2458
+ },
+ {
+ "epoch": 2.459691788315464,
+ "grad_norm": 0.6134495735168457,
+ "learning_rate": 1.5485301347735348e-05,
+ "loss": 1.3146,
+ "step": 2459
+ },
+ {
+ "epoch": 2.4606920696445878,
+ "grad_norm": 0.709322988986969,
+ "learning_rate": 1.5429315491844388e-05,
+ "loss": 1.123,
+ "step": 2460
+ },
+ {
+ "epoch": 2.461692350973711,
+ "grad_norm": 0.5995905995368958,
+ "learning_rate": 1.5373422563841133e-05,
+ "loss": 1.2285,
+ "step": 2461
+ },
+ {
+ "epoch": 2.462692632302835,
+ "grad_norm": 0.5527827143669128,
+ "learning_rate": 1.531762262514177e-05,
+ "loss": 1.0853,
+ "step": 2462
+ },
+ {
+ "epoch": 2.463692913631959,
+ "grad_norm": 0.6579242944717407,
+ "learning_rate": 1.5261915737060384e-05,
+ "loss": 1.2392,
+ "step": 2463
+ },
+ {
+ "epoch": 2.464693194961083,
+ "grad_norm": 0.613092303276062,
+ "learning_rate": 1.5206301960808722e-05,
+ "loss": 1.1744,
+ "step": 2464
+ },
+ {
+ "epoch": 2.4656934762902067,
+ "grad_norm": 0.585175633430481,
+ "learning_rate": 1.5150781357496314e-05,
+ "loss": 1.0613,
+ "step": 2465
+ },
+ {
+ "epoch": 2.4666937576193306,
+ "grad_norm": 0.6694045066833496,
+ "learning_rate": 1.5095353988130235e-05,
+ "loss": 1.3062,
+ "step": 2466
+ },
+ {
+ "epoch": 2.467694038948454,
+ "grad_norm": 0.6092827320098877,
+ "learning_rate": 1.5040019913615123e-05,
+ "loss": 1.2655,
+ "step": 2467
+ },
+ {
+ "epoch": 2.468694320277578,
+ "grad_norm": 0.6726471781730652,
+ "learning_rate": 1.4984779194753151e-05,
+ "loss": 1.4597,
+ "step": 2468
+ },
+ {
+ "epoch": 2.469694601606702,
+ "grad_norm": 0.8569711446762085,
+ "learning_rate": 1.4929631892243856e-05,
+ "loss": 1.5663,
+ "step": 2469
+ },
+ {
+ "epoch": 2.4706948829358257,
+ "grad_norm": 0.8185261487960815,
+ "learning_rate": 1.4874578066684186e-05,
+ "loss": 1.3967,
+ "step": 2470
+ },
+ {
+ "epoch": 2.4716951642649496,
+ "grad_norm": 0.5982089042663574,
+ "learning_rate": 1.4819617778568285e-05,
+ "loss": 1.1807,
+ "step": 2471
+ },
+ {
+ "epoch": 2.4726954455940735,
+ "grad_norm": 0.5836905241012573,
+ "learning_rate": 1.476475108828762e-05,
+ "loss": 1.2433,
+ "step": 2472
+ },
+ {
+ "epoch": 2.4736957269231974,
+ "grad_norm": 0.6865205764770508,
+ "learning_rate": 1.4709978056130713e-05,
+ "loss": 1.2392,
+ "step": 2473
+ },
+ {
+ "epoch": 2.474696008252321,
+ "grad_norm": 0.6308683156967163,
+ "learning_rate": 1.4655298742283252e-05,
+ "loss": 1.1384,
+ "step": 2474
+ },
+ {
+ "epoch": 2.4756962895814447,
+ "grad_norm": 0.5822309255599976,
+ "learning_rate": 1.4600713206827932e-05,
+ "loss": 1.0825,
+ "step": 2475
+ },
+ {
+ "epoch": 2.4766965709105686,
+ "grad_norm": 0.6284547448158264,
+ "learning_rate": 1.454622150974434e-05,
+ "loss": 1.2787,
+ "step": 2476
+ },
+ {
+ "epoch": 2.4776968522396925,
+ "grad_norm": 0.9280424118041992,
+ "learning_rate": 1.4491823710909047e-05,
+ "loss": 1.364,
+ "step": 2477
+ },
+ {
+ "epoch": 2.4786971335688164,
+ "grad_norm": 0.6958044171333313,
+ "learning_rate": 1.4437519870095329e-05,
+ "loss": 1.2987,
+ "step": 2478
+ },
+ {
+ "epoch": 2.47969741489794,
+ "grad_norm": 0.6451518535614014,
+ "learning_rate": 1.4383310046973365e-05,
+ "loss": 0.995,
+ "step": 2479
+ },
+ {
+ "epoch": 2.4806976962270637,
+ "grad_norm": 0.7004462480545044,
+ "learning_rate": 1.4329194301109872e-05,
+ "loss": 1.325,
+ "step": 2480
+ },
+ {
+ "epoch": 2.4816979775561876,
+ "grad_norm": 0.6288760304450989,
+ "learning_rate": 1.427517269196833e-05,
+ "loss": 1.2204,
+ "step": 2481
+ },
+ {
+ "epoch": 2.4826982588853115,
+ "grad_norm": 0.7389028072357178,
+ "learning_rate": 1.4221245278908668e-05,
+ "loss": 1.3713,
+ "step": 2482
+ },
+ {
+ "epoch": 2.4836985402144354,
+ "grad_norm": 0.6103761196136475,
+ "learning_rate": 1.4167412121187406e-05,
+ "loss": 1.2789,
+ "step": 2483
+ },
+ {
+ "epoch": 2.4846988215435593,
+ "grad_norm": 0.7794318795204163,
+ "learning_rate": 1.4113673277957395e-05,
+ "loss": 1.2033,
+ "step": 2484
+ },
+ {
+ "epoch": 2.485699102872683,
+ "grad_norm": 0.6768416166305542,
+ "learning_rate": 1.4060028808267967e-05,
+ "loss": 1.2824,
+ "step": 2485
+ },
+ {
+ "epoch": 2.4866993842018066,
+ "grad_norm": 0.6633111238479614,
+ "learning_rate": 1.4006478771064646e-05,
+ "loss": 1.2352,
+ "step": 2486
+ },
+ {
+ "epoch": 2.4876996655309305,
+ "grad_norm": 0.5679075717926025,
+ "learning_rate": 1.3953023225189243e-05,
+ "loss": 1.1662,
+ "step": 2487
+ },
+ {
+ "epoch": 2.4886999468600544,
+ "grad_norm": 0.682587206363678,
+ "learning_rate": 1.389966222937974e-05,
+ "loss": 1.2081,
+ "step": 2488
+ },
+ {
+ "epoch": 2.4897002281891782,
+ "grad_norm": 0.5690992474555969,
+ "learning_rate": 1.3846395842270232e-05,
+ "loss": 1.1828,
+ "step": 2489
+ },
+ {
+ "epoch": 2.490700509518302,
+ "grad_norm": 0.725383460521698,
+ "learning_rate": 1.3793224122390858e-05,
+ "loss": 1.4199,
+ "step": 2490
+ },
+ {
+ "epoch": 2.491700790847426,
+ "grad_norm": 0.7507576942443848,
+ "learning_rate": 1.374014712816768e-05,
+ "loss": 1.2292,
+ "step": 2491
+ },
+ {
+ "epoch": 2.4927010721765495,
+ "grad_norm": 0.6473737955093384,
+ "learning_rate": 1.3687164917922768e-05,
+ "loss": 1.4711,
+ "step": 2492
+ },
+ {
+ "epoch": 2.4937013535056733,
+ "grad_norm": 0.7573422193527222,
+ "learning_rate": 1.3634277549873953e-05,
+ "loss": 1.4421,
+ "step": 2493
+ },
+ {
+ "epoch": 2.4947016348347972,
+ "grad_norm": 0.6150047779083252,
+ "learning_rate": 1.3581485082134882e-05,
+ "loss": 1.1721,
+ "step": 2494
+ },
+ {
+ "epoch": 2.495701916163921,
+ "grad_norm": 0.6486732959747314,
+ "learning_rate": 1.3528787572714952e-05,
+ "loss": 1.2939,
+ "step": 2495
+ },
+ {
+ "epoch": 2.496702197493045,
+ "grad_norm": 0.632709801197052,
+ "learning_rate": 1.3476185079519177e-05,
+ "loss": 1.4171,
+ "step": 2496
+ },
+ {
+ "epoch": 2.497702478822169,
+ "grad_norm": 0.7160407900810242,
+ "learning_rate": 1.342367766034821e-05,
+ "loss": 1.3601,
+ "step": 2497
+ },
+ {
+ "epoch": 2.498702760151293,
+ "grad_norm": 0.7090329527854919,
+ "learning_rate": 1.3371265372898167e-05,
+ "loss": 1.4255,
+ "step": 2498
+ },
+ {
+ "epoch": 2.499703041480416,
+ "grad_norm": 0.6394259929656982,
+ "learning_rate": 1.3318948274760734e-05,
+ "loss": 1.2454,
+ "step": 2499
+ },
+ {
+ "epoch": 2.50070332280954,
+ "grad_norm": 0.6224787831306458,
+ "learning_rate": 1.326672642342287e-05,
+ "loss": 1.3184,
+ "step": 2500
+ },
+ {
+ "epoch": 2.501703604138664,
+ "grad_norm": 0.6741747856140137,
+ "learning_rate": 1.3214599876266998e-05,
+ "loss": 1.3455,
+ "step": 2501
+ },
+ {
+ "epoch": 2.502703885467788,
+ "grad_norm": 0.5820305347442627,
+ "learning_rate": 1.3162568690570743e-05,
+ "loss": 1.2207,
+ "step": 2502
+ },
+ {
+ "epoch": 2.5037041667969118,
+ "grad_norm": 0.7197089791297913,
+ "learning_rate": 1.311063292350696e-05,
+ "loss": 1.387,
+ "step": 2503
+ },
+ {
+ "epoch": 2.504704448126035,
+ "grad_norm": 0.6681255102157593,
+ "learning_rate": 1.3058792632143701e-05,
+ "loss": 1.3116,
+ "step": 2504
+ },
+ {
+ "epoch": 2.505704729455159,
+ "grad_norm": 0.6714862585067749,
+ "learning_rate": 1.3007047873444034e-05,
+ "loss": 1.246,
+ "step": 2505
+ },
+ {
+ "epoch": 2.506705010784283,
+ "grad_norm": 0.6364768147468567,
+ "learning_rate": 1.295539870426612e-05,
+ "loss": 1.3559,
+ "step": 2506
+ },
+ {
+ "epoch": 2.507705292113407,
+ "grad_norm": 0.6749326586723328,
+ "learning_rate": 1.2903845181363017e-05,
+ "loss": 1.2395,
+ "step": 2507
+ },
+ {
+ "epoch": 2.5087055734425308,
+ "grad_norm": 0.7527327537536621,
+ "learning_rate": 1.2852387361382767e-05,
+ "loss": 1.4625,
+ "step": 2508
+ },
+ {
+ "epoch": 2.5097058547716546,
+ "grad_norm": 0.684592604637146,
+ "learning_rate": 1.2801025300868163e-05,
+ "loss": 1.2286,
+ "step": 2509
+ },
+ {
+ "epoch": 2.5107061361007785,
+ "grad_norm": 0.6808905601501465,
+ "learning_rate": 1.2749759056256883e-05,
+ "loss": 1.4034,
+ "step": 2510
+ },
+ {
+ "epoch": 2.5117064174299024,
+ "grad_norm": 0.7233620285987854,
+ "learning_rate": 1.2698588683881186e-05,
+ "loss": 1.3094,
+ "step": 2511
+ },
+ {
+ "epoch": 2.512706698759026,
+ "grad_norm": 0.6889445781707764,
+ "learning_rate": 1.2647514239968105e-05,
+ "loss": 1.1749,
+ "step": 2512
+ },
+ {
+ "epoch": 2.5137069800881497,
+ "grad_norm": 0.6665747165679932,
+ "learning_rate": 1.2596535780639218e-05,
+ "loss": 1.3105,
+ "step": 2513
+ },
+ {
+ "epoch": 2.5147072614172736,
+ "grad_norm": 0.6501197218894958,
+ "learning_rate": 1.2545653361910592e-05,
+ "loss": 1.1436,
+ "step": 2514
+ },
+ {
+ "epoch": 2.5157075427463975,
+ "grad_norm": 0.8026642203330994,
+ "learning_rate": 1.2494867039692848e-05,
+ "loss": 1.2504,
+ "step": 2515
+ },
+ {
+ "epoch": 2.5167078240755214,
+ "grad_norm": 0.5991237759590149,
+ "learning_rate": 1.2444176869790925e-05,
+ "loss": 1.3352,
+ "step": 2516
+ },
+ {
+ "epoch": 2.517708105404645,
+ "grad_norm": 0.6183250546455383,
+ "learning_rate": 1.23935829079042e-05,
+ "loss": 1.093,
+ "step": 2517
+ },
+ {
+ "epoch": 2.5187083867337687,
+ "grad_norm": 0.6797662377357483,
+ "learning_rate": 1.2343085209626225e-05,
+ "loss": 1.2259,
+ "step": 2518
+ },
+ {
+ "epoch": 2.5197086680628926,
+ "grad_norm": 0.6497726440429688,
+ "learning_rate": 1.2292683830444919e-05,
+ "loss": 1.1837,
+ "step": 2519
+ },
+ {
+ "epoch": 2.5207089493920165,
+ "grad_norm": 0.6999863386154175,
+ "learning_rate": 1.224237882574224e-05,
+ "loss": 1.0971,
+ "step": 2520
+ },
+ {
+ "epoch": 2.5217092307211404,
+ "grad_norm": 0.6802374124526978,
+ "learning_rate": 1.2192170250794277e-05,
+ "loss": 1.1931,
+ "step": 2521
+ },
+ {
+ "epoch": 2.5227095120502643,
+ "grad_norm": 0.5806543827056885,
+ "learning_rate": 1.2142058160771207e-05,
+ "loss": 1.3241,
+ "step": 2522
+ },
+ {
+ "epoch": 2.523709793379388,
+ "grad_norm": 0.6885263323783875,
+ "learning_rate": 1.2092042610737108e-05,
+ "loss": 1.2562,
+ "step": 2523
+ },
+ {
+ "epoch": 2.5247100747085116,
+ "grad_norm": 0.6671591997146606,
+ "learning_rate": 1.2042123655650084e-05,
+ "loss": 1.3449,
+ "step": 2524
+ },
+ {
+ "epoch": 2.5257103560376355,
+ "grad_norm": 0.6619551777839661,
+ "learning_rate": 1.1992301350361978e-05,
+ "loss": 1.1874,
+ "step": 2525
+ },
+ {
+ "epoch": 2.5267106373667594,
+ "grad_norm": 0.6866716146469116,
+ "learning_rate": 1.1942575749618546e-05,
+ "loss": 1.4093,
+ "step": 2526
+ },
+ {
+ "epoch": 2.5277109186958833,
+ "grad_norm": 0.6128354072570801,
+ "learning_rate": 1.1892946908059188e-05,
+ "loss": 1.2392,
+ "step": 2527
+ },
+ {
+ "epoch": 2.528711200025007,
+ "grad_norm": 0.6749070286750793,
+ "learning_rate": 1.1843414880217051e-05,
+ "loss": 1.0333,
+ "step": 2528
+ },
+ {
+ "epoch": 2.5297114813541306,
+ "grad_norm": 0.6855870485305786,
+ "learning_rate": 1.1793979720518866e-05,
+ "loss": 1.2522,
+ "step": 2529
+ },
+ {
+ "epoch": 2.5307117626832545,
+ "grad_norm": 0.650435745716095,
+ "learning_rate": 1.1744641483284946e-05,
+ "loss": 1.034,
+ "step": 2530
+ },
+ {
+ "epoch": 2.5317120440123784,
+ "grad_norm": 0.5725820064544678,
+ "learning_rate": 1.1695400222729058e-05,
+ "loss": 1.156,
+ "step": 2531
+ },
+ {
+ "epoch": 2.5327123253415023,
+ "grad_norm": 0.6388399004936218,
+ "learning_rate": 1.1646255992958466e-05,
+ "loss": 1.3486,
+ "step": 2532
+ },
+ {
+ "epoch": 2.533712606670626,
+ "grad_norm": 0.611092746257782,
+ "learning_rate": 1.1597208847973818e-05,
+ "loss": 1.1422,
+ "step": 2533
+ },
+ {
+ "epoch": 2.53471288799975,
+ "grad_norm": 0.563726544380188,
+ "learning_rate": 1.1548258841669002e-05,
+ "loss": 1.2846,
+ "step": 2534
+ },
+ {
+ "epoch": 2.535713169328874,
+ "grad_norm": 0.6702500581741333,
+ "learning_rate": 1.1499406027831262e-05,
+ "loss": 1.3538,
+ "step": 2535
+ },
+ {
+ "epoch": 2.536713450657998,
+ "grad_norm": 0.6740272045135498,
+ "learning_rate": 1.1450650460140977e-05,
+ "loss": 1.3119,
+ "step": 2536
+ },
+ {
+ "epoch": 2.5377137319871212,
+ "grad_norm": 0.6462887525558472,
+ "learning_rate": 1.140199219217174e-05,
+ "loss": 1.1595,
+ "step": 2537
+ },
+ {
+ "epoch": 2.538714013316245,
+ "grad_norm": 0.6164881587028503,
+ "learning_rate": 1.1353431277390126e-05,
+ "loss": 1.2632,
+ "step": 2538
+ },
+ {
+ "epoch": 2.539714294645369,
+ "grad_norm": 0.6680785417556763,
+ "learning_rate": 1.1304967769155861e-05,
+ "loss": 1.3207,
+ "step": 2539
+ },
+ {
+ "epoch": 2.540714575974493,
+ "grad_norm": 0.6546500325202942,
+ "learning_rate": 1.1256601720721549e-05,
+ "loss": 1.1022,
+ "step": 2540
+ },
+ {
+ "epoch": 2.541714857303617,
+ "grad_norm": 0.7679521441459656,
+ "learning_rate": 1.1208333185232712e-05,
+ "loss": 1.159,
+ "step": 2541
+ },
+ {
+ "epoch": 2.5427151386327402,
+ "grad_norm": 0.6083775758743286,
+ "learning_rate": 1.1160162215727765e-05,
+ "loss": 1.3186,
+ "step": 2542
+ },
+ {
+ "epoch": 2.543715419961864,
+ "grad_norm": 0.7204008102416992,
+ "learning_rate": 1.111208886513787e-05,
+ "loss": 1.4008,
+ "step": 2543
+ },
+ {
+ "epoch": 2.544715701290988,
+ "grad_norm": 0.6674452424049377,
+ "learning_rate": 1.1064113186286984e-05,
+ "loss": 1.4622,
+ "step": 2544
+ },
+ {
+ "epoch": 2.545715982620112,
+ "grad_norm": 0.7217103838920593,
+ "learning_rate": 1.1016235231891658e-05,
+ "loss": 1.2243,
+ "step": 2545
+ },
+ {
+ "epoch": 2.546716263949236,
+ "grad_norm": 0.5915346145629883,
+ "learning_rate": 1.0968455054561145e-05,
+ "loss": 1.3309,
+ "step": 2546
+ },
+ {
+ "epoch": 2.5477165452783597,
+ "grad_norm": 0.714104950428009,
+ "learning_rate": 1.0920772706797167e-05,
+ "loss": 1.3601,
+ "step": 2547
+ },
+ {
+ "epoch": 2.5487168266074836,
+ "grad_norm": 0.6451223492622375,
+ "learning_rate": 1.0873188240994048e-05,
+ "loss": 1.1407,
+ "step": 2548
+ },
+ {
+ "epoch": 2.5497171079366074,
+ "grad_norm": 0.6306673884391785,
+ "learning_rate": 1.0825701709438508e-05,
+ "loss": 1.2198,
+ "step": 2549
+ },
+ {
+ "epoch": 2.550717389265731,
+ "grad_norm": 0.6271165609359741,
+ "learning_rate": 1.0778313164309616e-05,
+ "loss": 1.3678,
+ "step": 2550
+ },
+ {
+ "epoch": 2.5517176705948548,
+ "grad_norm": 0.7283329367637634,
+ "learning_rate": 1.0731022657678869e-05,
+ "loss": 1.3104,
+ "step": 2551
+ },
+ {
+ "epoch": 2.5527179519239787,
+ "grad_norm": 0.5688088536262512,
+ "learning_rate": 1.068383024150994e-05,
+ "loss": 1.0877,
+ "step": 2552
+ },
+ {
+ "epoch": 2.5537182332531025,
+ "grad_norm": 0.6837950944900513,
+ "learning_rate": 1.0636735967658784e-05,
+ "loss": 1.1619,
+ "step": 2553
+ },
+ {
+ "epoch": 2.554718514582226,
+ "grad_norm": 0.5869771242141724,
+ "learning_rate": 1.0589739887873462e-05,
+ "loss": 1.173,
+ "step": 2554
+ },
+ {
+ "epoch": 2.55571879591135,
+ "grad_norm": 0.6485967040061951,
+ "learning_rate": 1.05428420537942e-05,
+ "loss": 1.1743,
+ "step": 2555
+ },
+ {
+ "epoch": 2.5567190772404738,
+ "grad_norm": 0.7167952656745911,
+ "learning_rate": 1.0496042516953209e-05,
+ "loss": 1.546,
+ "step": 2556
+ },
+ {
+ "epoch": 2.5577193585695976,
+ "grad_norm": 0.6303339600563049,
+ "learning_rate": 1.0449341328774743e-05,
+ "loss": 1.0787,
+ "step": 2557
+ },
+ {
+ "epoch": 2.5587196398987215,
+ "grad_norm": 0.6141791939735413,
+ "learning_rate": 1.040273854057493e-05,
+ "loss": 1.2537,
+ "step": 2558
+ },
+ {
+ "epoch": 2.5597199212278454,
+ "grad_norm": 0.6624209880828857,
+ "learning_rate": 1.0356234203561832e-05,
+ "loss": 1.3441,
+ "step": 2559
+ },
+ {
+ "epoch": 2.5607202025569693,
+ "grad_norm": 0.5435961484909058,
+ "learning_rate": 1.0309828368835305e-05,
+ "loss": 1.1835,
+ "step": 2560
+ },
+ {
+ "epoch": 2.561720483886093,
+ "grad_norm": 0.6647830605506897,
+ "learning_rate": 1.026352108738694e-05,
+ "loss": 1.2157,
+ "step": 2561
+ },
+ {
+ "epoch": 2.5627207652152166,
+ "grad_norm": 0.6574770212173462,
+ "learning_rate": 1.0217312410100089e-05,
+ "loss": 1.2766,
+ "step": 2562
+ },
+ {
+ "epoch": 2.5637210465443405,
+ "grad_norm": 0.7487883567810059,
+ "learning_rate": 1.0171202387749724e-05,
+ "loss": 1.2969,
+ "step": 2563
+ },
+ {
+ "epoch": 2.5647213278734644,
+ "grad_norm": 0.6833634972572327,
+ "learning_rate": 1.0125191071002438e-05,
+ "loss": 1.2627,
+ "step": 2564
+ },
+ {
+ "epoch": 2.5657216092025883,
+ "grad_norm": 0.6321252584457397,
+ "learning_rate": 1.0079278510416313e-05,
+ "loss": 1.0923,
+ "step": 2565
+ },
+ {
+ "epoch": 2.566721890531712,
+ "grad_norm": 0.6387611627578735,
+ "learning_rate": 1.0033464756440991e-05,
+ "loss": 1.313,
+ "step": 2566
+ },
+ {
+ "epoch": 2.5677221718608356,
+ "grad_norm": 0.5905706882476807,
+ "learning_rate": 9.987749859417484e-06,
+ "loss": 1.1996,
+ "step": 2567
+ },
+ {
+ "epoch": 2.5687224531899595,
+ "grad_norm": 0.6723807454109192,
+ "learning_rate": 9.942133869578164e-06,
+ "loss": 1.361,
+ "step": 2568
+ },
+ {
+ "epoch": 2.5697227345190834,
+ "grad_norm": 0.5416411757469177,
+ "learning_rate": 9.896616837046813e-06,
+ "loss": 1.2601,
+ "step": 2569
+ },
+ {
+ "epoch": 2.5707230158482073,
+ "grad_norm": 0.66501384973526,
+ "learning_rate": 9.851198811838368e-06,
+ "loss": 1.3886,
+ "step": 2570
+ },
+ {
+ "epoch": 2.571723297177331,
+ "grad_norm": 0.627778172492981,
+ "learning_rate": 9.805879843859057e-06,
+ "loss": 1.1325,
+ "step": 2571
+ },
+ {
+ "epoch": 2.572723578506455,
+ "grad_norm": 0.5483949780464172,
+ "learning_rate": 9.760659982906206e-06,
+ "loss": 1.1557,
+ "step": 2572
+ },
+ {
+ "epoch": 2.573723859835579,
+ "grad_norm": 0.7219141721725464,
+ "learning_rate": 9.715539278668284e-06,
+ "loss": 1.1909,
+ "step": 2573
+ },
+ {
+ "epoch": 2.574724141164703,
+ "grad_norm": 0.6443437337875366,
+ "learning_rate": 9.670517780724775e-06,
+ "loss": 1.3173,
+ "step": 2574
+ },
+ {
+ "epoch": 2.5757244224938263,
+ "grad_norm": 0.624271035194397,
+ "learning_rate": 9.625595538546173e-06,
+ "loss": 1.3176,
+ "step": 2575
+ },
+ {
+ "epoch": 2.57672470382295,
+ "grad_norm": 0.6539580821990967,
+ "learning_rate": 9.580772601493871e-06,
+ "loss": 1.1758,
+ "step": 2576
+ },
+ {
+ "epoch": 2.577724985152074,
+ "grad_norm": 0.6732341051101685,
+ "learning_rate": 9.536049018820192e-06,
+ "loss": 1.2681,
+ "step": 2577
+ },
+ {
+ "epoch": 2.578725266481198,
+ "grad_norm": 0.6086184978485107,
+ "learning_rate": 9.491424839668273e-06,
+ "loss": 1.4588,
+ "step": 2578
+ },
+ {
+ "epoch": 2.579725547810322,
+ "grad_norm": 0.6377266645431519,
+ "learning_rate": 9.446900113072e-06,
+ "loss": 1.1644,
+ "step": 2579
+ },
+ {
+ "epoch": 2.5807258291394453,
+ "grad_norm": 0.6585245728492737,
+ "learning_rate": 9.402474887956015e-06,
+ "loss": 1.3507,
+ "step": 2580
+ },
+ {
+ "epoch": 2.581726110468569,
+ "grad_norm": 0.6395363807678223,
+ "learning_rate": 9.35814921313557e-06,
+ "loss": 1.275,
+ "step": 2581
+ },
+ {
+ "epoch": 2.582726391797693,
+ "grad_norm": 0.631462574005127,
+ "learning_rate": 9.313923137316616e-06,
+ "loss": 1.4783,
+ "step": 2582
+ },
+ {
+ "epoch": 2.583726673126817,
+ "grad_norm": 0.701928436756134,
+ "learning_rate": 9.269796709095558e-06,
+ "loss": 1.1986,
+ "step": 2583
+ },
+ {
+ "epoch": 2.584726954455941,
+ "grad_norm": 0.6914161443710327,
+ "learning_rate": 9.225769976959408e-06,
+ "loss": 1.2553,
+ "step": 2584
+ },
+ {
+ "epoch": 2.5857272357850647,
+ "grad_norm": 0.6708330512046814,
+ "learning_rate": 9.18184298928556e-06,
+ "loss": 1.3881,
+ "step": 2585
+ },
+ {
+ "epoch": 2.5867275171141886,
+ "grad_norm": 0.6319028735160828,
+ "learning_rate": 9.138015794341815e-06,
+ "loss": 1.4741,
+ "step": 2586
+ },
+ {
+ "epoch": 2.587727798443312,
+ "grad_norm": 0.6569663882255554,
+ "learning_rate": 9.09428844028637e-06,
+ "loss": 1.218,
+ "step": 2587
+ },
+ {
+ "epoch": 2.588728079772436,
+ "grad_norm": 0.7259393334388733,
+ "learning_rate": 9.050660975167635e-06,
+ "loss": 1.2962,
+ "step": 2588
+ },
+ {
+ "epoch": 2.58972836110156,
+ "grad_norm": 0.8332608342170715,
+ "learning_rate": 9.007133446924343e-06,
+ "loss": 1.3116,
+ "step": 2589
+ },
+ {
+ "epoch": 2.5907286424306837,
+ "grad_norm": 0.6275321245193481,
+ "learning_rate": 8.963705903385345e-06,
+ "loss": 1.2789,
+ "step": 2590
+ },
+ {
+ "epoch": 2.5917289237598076,
+ "grad_norm": 0.5883187055587769,
+ "learning_rate": 8.92037839226969e-06,
+ "loss": 1.0979,
+ "step": 2591
+ },
+ {
+ "epoch": 2.592729205088931,
+ "grad_norm": 0.6807481646537781,
+ "learning_rate": 8.87715096118642e-06,
+ "loss": 1.3192,
+ "step": 2592
+ },
+ {
+ "epoch": 2.593729486418055,
+ "grad_norm": 0.6654528379440308,
+ "learning_rate": 8.834023657634739e-06,
+ "loss": 1.1681,
+ "step": 2593
+ },
+ {
+ "epoch": 2.594729767747179,
+ "grad_norm": 0.6537184715270996,
+ "learning_rate": 8.790996529003726e-06,
+ "loss": 1.2997,
+ "step": 2594
+ },
+ {
+ "epoch": 2.5957300490763027,
+ "grad_norm": 0.6835540533065796,
+ "learning_rate": 8.748069622572386e-06,
+ "loss": 1.287,
+ "step": 2595
+ },
+ {
+ "epoch": 2.5967303304054266,
+ "grad_norm": 0.5837362408638,
+ "learning_rate": 8.705242985509665e-06,
+ "loss": 1.2643,
+ "step": 2596
+ },
+ {
+ "epoch": 2.5977306117345504,
+ "grad_norm": 0.7342731356620789,
+ "learning_rate": 8.662516664874254e-06,
+ "loss": 1.3071,
+ "step": 2597
+ },
+ {
+ "epoch": 2.5987308930636743,
+ "grad_norm": 0.6614213585853577,
+ "learning_rate": 8.619890707614686e-06,
+ "loss": 1.323,
+ "step": 2598
+ },
+ {
+ "epoch": 2.599731174392798,
+ "grad_norm": 0.588958203792572,
+ "learning_rate": 8.57736516056915e-06,
+ "loss": 1.3028,
+ "step": 2599
+ },
+ {
+ "epoch": 2.6007314557219217,
+ "grad_norm": 0.7111889123916626,
+ "learning_rate": 8.534940070465568e-06,
+ "loss": 1.4335,
+ "step": 2600
+ },
+ {
+ "epoch": 2.6017317370510455,
+ "grad_norm": 0.614526093006134,
+ "learning_rate": 8.492615483921395e-06,
+ "loss": 1.1843,
+ "step": 2601
+ },
+ {
+ "epoch": 2.6027320183801694,
+ "grad_norm": 0.8497361540794373,
+ "learning_rate": 8.45039144744374e-06,
+ "loss": 1.3609,
+ "step": 2602
+ },
+ {
+ "epoch": 2.6037322997092933,
+ "grad_norm": 0.6366671919822693,
+ "learning_rate": 8.408268007429154e-06,
+ "loss": 1.2755,
+ "step": 2603
+ },
+ {
+ "epoch": 2.604732581038417,
+ "grad_norm": 0.5924212336540222,
+ "learning_rate": 8.366245210163703e-06,
+ "loss": 1.2317,
+ "step": 2604
+ },
+ {
+ "epoch": 2.6057328623675406,
+ "grad_norm": 0.5854505300521851,
+ "learning_rate": 8.324323101822829e-06,
+ "loss": 1.0763,
+ "step": 2605
+ },
+ {
+ "epoch": 2.6067331436966645,
+ "grad_norm": 0.6472430229187012,
+ "learning_rate": 8.282501728471315e-06,
+ "loss": 1.3323,
+ "step": 2606
+ },
+ {
+ "epoch": 2.6077334250257884,
+ "grad_norm": 0.8975957036018372,
+ "learning_rate": 8.240781136063346e-06,
+ "loss": 1.1955,
+ "step": 2607
+ },
+ {
+ "epoch": 2.6087337063549123,
+ "grad_norm": 0.6365610361099243,
+ "learning_rate": 8.199161370442276e-06,
+ "loss": 1.2761,
+ "step": 2608
+ },
+ {
+ "epoch": 2.609733987684036,
+ "grad_norm": 0.6164368987083435,
+ "learning_rate": 8.15764247734071e-06,
+ "loss": 1.1456,
+ "step": 2609
+ },
+ {
+ "epoch": 2.61073426901316,
+ "grad_norm": 0.5948717594146729,
+ "learning_rate": 8.116224502380387e-06,
+ "loss": 1.0682,
+ "step": 2610
+ },
+ {
+ "epoch": 2.611734550342284,
+ "grad_norm": 0.6960248351097107,
+ "learning_rate": 8.074907491072203e-06,
+ "loss": 1.1664,
+ "step": 2611
+ },
+ {
+ "epoch": 2.612734831671408,
+ "grad_norm": 0.6864999532699585,
+ "learning_rate": 8.033691488816054e-06,
+ "loss": 1.1924,
+ "step": 2612
+ },
+ {
+ "epoch": 2.6137351130005313,
+ "grad_norm": 0.7503032684326172,
+ "learning_rate": 7.992576540900876e-06,
+ "loss": 1.2848,
+ "step": 2613
+ },
+ {
+ "epoch": 2.614735394329655,
+ "grad_norm": 0.7044053077697754,
+ "learning_rate": 7.951562692504566e-06,
+ "loss": 1.1977,
+ "step": 2614
+ },
+ {
+ "epoch": 2.615735675658779,
+ "grad_norm": 0.5872082114219666,
+ "learning_rate": 7.910649988693908e-06,
+ "loss": 1.2364,
+ "step": 2615
+ },
+ {
+ "epoch": 2.616735956987903,
+ "grad_norm": 0.7040688395500183,
+ "learning_rate": 7.869838474424607e-06,
+ "loss": 1.308,
+ "step": 2616
+ },
+ {
+ "epoch": 2.6177362383170264,
+ "grad_norm": 0.628923773765564,
+ "learning_rate": 7.82912819454109e-06,
+ "loss": 1.3439,
+ "step": 2617
+ },
+ {
+ "epoch": 2.6187365196461503,
+ "grad_norm": 0.6534037590026855,
+ "learning_rate": 7.788519193776634e-06,
+ "loss": 1.1564,
+ "step": 2618
+ },
+ {
+ "epoch": 2.619736800975274,
+ "grad_norm": 0.6709951758384705,
+ "learning_rate": 7.74801151675314e-06,
+ "loss": 1.1008,
+ "step": 2619
+ },
+ {
+ "epoch": 2.620737082304398,
+ "grad_norm": 0.6788686513900757,
+ "learning_rate": 7.707605207981262e-06,
+ "loss": 1.1753,
+ "step": 2620
+ },
+ {
+ "epoch": 2.621737363633522,
+ "grad_norm": 0.686370849609375,
+ "learning_rate": 7.667300311860193e-06,
+ "loss": 1.238,
+ "step": 2621
+ },
+ {
+ "epoch": 2.622737644962646,
+ "grad_norm": 0.6644055247306824,
+ "learning_rate": 7.6270968726777414e-06,
+ "loss": 1.2212,
+ "step": 2622
+ },
+ {
+ "epoch": 2.6237379262917697,
+ "grad_norm": 0.6833274960517883,
+ "learning_rate": 7.586994934610225e-06,
+ "loss": 1.2673,
+ "step": 2623
+ },
+ {
+ "epoch": 2.6247382076208936,
+ "grad_norm": 0.8524953126907349,
+ "learning_rate": 7.546994541722408e-06,
+ "loss": 1.2621,
+ "step": 2624
+ },
+ {
+ "epoch": 2.625738488950017,
+ "grad_norm": 0.6766435503959656,
+ "learning_rate": 7.507095737967495e-06,
+ "loss": 1.1421,
+ "step": 2625
+ },
+ {
+ "epoch": 2.626738770279141,
+ "grad_norm": 0.6439013481140137,
+ "learning_rate": 7.467298567187042e-06,
+ "loss": 1.2881,
+ "step": 2626
+ },
+ {
+ "epoch": 2.627739051608265,
+ "grad_norm": 0.7657039165496826,
+ "learning_rate": 7.427603073110967e-06,
+ "loss": 1.197,
+ "step": 2627
+ },
+ {
+ "epoch": 2.6287393329373887,
+ "grad_norm": 0.6155279278755188,
+ "learning_rate": 7.3880092993574125e-06,
+ "loss": 1.2739,
+ "step": 2628
+ },
+ {
+ "epoch": 2.6297396142665126,
+ "grad_norm": 0.5381492376327515,
+ "learning_rate": 7.3485172894327995e-06,
+ "loss": 1.1274,
+ "step": 2629
+ },
+ {
+ "epoch": 2.630739895595636,
+ "grad_norm": 0.6491442322731018,
+ "learning_rate": 7.309127086731671e-06,
+ "loss": 1.1782,
+ "step": 2630
+ },
+ {
+ "epoch": 2.63174017692476,
+ "grad_norm": 0.6506078243255615,
+ "learning_rate": 7.269838734536771e-06,
+ "loss": 1.3225,
+ "step": 2631
+ },
+ {
+ "epoch": 2.632740458253884,
+ "grad_norm": 1.2154327630996704,
+ "learning_rate": 7.23065227601889e-06,
+ "loss": 1.4346,
+ "step": 2632
+ },
+ {
+ "epoch": 2.6337407395830077,
+ "grad_norm": 0.614640474319458,
+ "learning_rate": 7.1915677542368275e-06,
+ "loss": 1.1933,
+ "step": 2633
+ },
+ {
+ "epoch": 2.6347410209121316,
+ "grad_norm": 0.5466010570526123,
+ "learning_rate": 7.152585212137441e-06,
+ "loss": 0.9932,
+ "step": 2634
+ },
+ {
+ "epoch": 2.6357413022412555,
+ "grad_norm": 0.5916187763214111,
+ "learning_rate": 7.113704692555467e-06,
+ "loss": 1.1673,
+ "step": 2635
+ },
+ {
+ "epoch": 2.6367415835703794,
+ "grad_norm": 0.636686384677887,
+ "learning_rate": 7.0749262382135754e-06,
+ "loss": 1.158,
+ "step": 2636
+ },
+ {
+ "epoch": 2.6377418648995032,
+ "grad_norm": 0.6650112867355347,
+ "learning_rate": 7.03624989172228e-06,
+ "loss": 1.2022,
+ "step": 2637
+ },
+ {
+ "epoch": 2.6387421462286267,
+ "grad_norm": 0.7011269330978394,
+ "learning_rate": 6.997675695579897e-06,
+ "loss": 1.1948,
+ "step": 2638
+ },
+ {
+ "epoch": 2.6397424275577506,
+ "grad_norm": 0.7781692743301392,
+ "learning_rate": 6.9592036921724894e-06,
+ "loss": 1.3643,
+ "step": 2639
+ },
+ {
+ "epoch": 2.6407427088868745,
+ "grad_norm": 0.6225884556770325,
+ "learning_rate": 6.920833923773795e-06,
+ "loss": 1.3545,
+ "step": 2640
+ },
+ {
+ "epoch": 2.6417429902159983,
+ "grad_norm": 0.8119070529937744,
+ "learning_rate": 6.882566432545301e-06,
+ "loss": 1.2181,
+ "step": 2641
+ },
+ {
+ "epoch": 2.6427432715451222,
+ "grad_norm": 0.6620864868164062,
+ "learning_rate": 6.844401260536026e-06,
+ "loss": 1.3604,
+ "step": 2642
+ },
+ {
+ "epoch": 2.6437435528742457,
+ "grad_norm": 0.6014599204063416,
+ "learning_rate": 6.806338449682614e-06,
+ "loss": 1.0719,
+ "step": 2643
+ },
+ {
+ "epoch": 2.6447438342033696,
+ "grad_norm": 0.6501942276954651,
+ "learning_rate": 6.768378041809187e-06,
+ "loss": 1.2234,
+ "step": 2644
+ },
+ {
+ "epoch": 2.6457441155324934,
+ "grad_norm": 0.6102598309516907,
+ "learning_rate": 6.7305200786274e-06,
+ "loss": 1.3143,
+ "step": 2645
+ },
+ {
+ "epoch": 2.6467443968616173,
+ "grad_norm": 0.5882496237754822,
+ "learning_rate": 6.692764601736268e-06,
+ "loss": 1.3232,
+ "step": 2646
+ },
+ {
+ "epoch": 2.647744678190741,
+ "grad_norm": 0.60554438829422,
+ "learning_rate": 6.65511165262227e-06,
+ "loss": 1.1707,
+ "step": 2647
+ },
+ {
+ "epoch": 2.648744959519865,
+ "grad_norm": 0.6433539390563965,
+ "learning_rate": 6.617561272659156e-06,
+ "loss": 1.3457,
+ "step": 2648
+ },
+ {
+ "epoch": 2.649745240848989,
+ "grad_norm": 0.6486794948577881,
+ "learning_rate": 6.580113503108032e-06,
+ "loss": 1.275,
+ "step": 2649
+ },
+ {
+ "epoch": 2.6507455221781124,
+ "grad_norm": 0.6093539595603943,
+ "learning_rate": 6.542768385117193e-06,
+ "loss": 1.2013,
+ "step": 2650
+ },
+ {
+ "epoch": 2.6517458035072363,
+ "grad_norm": 0.7034041285514832,
+ "learning_rate": 6.505525959722181e-06,
+ "loss": 1.3831,
+ "step": 2651
+ },
+ {
+ "epoch": 2.65274608483636,
+ "grad_norm": 0.6973058581352234,
+ "learning_rate": 6.468386267845717e-06,
+ "loss": 1.1982,
+ "step": 2652
+ },
+ {
+ "epoch": 2.653746366165484,
+ "grad_norm": 0.6613555550575256,
+ "learning_rate": 6.431349350297555e-06,
+ "loss": 1.2528,
+ "step": 2653
+ },
+ {
+ "epoch": 2.654746647494608,
+ "grad_norm": 0.6921932101249695,
+ "learning_rate": 6.394415247774621e-06,
+ "loss": 1.3216,
+ "step": 2654
+ },
+ {
+ "epoch": 2.6557469288237314,
+ "grad_norm": 0.6463204622268677,
+ "learning_rate": 6.357584000860761e-06,
+ "loss": 1.4184,
+ "step": 2655
+ },
+ {
+ "epoch": 2.6567472101528553,
+ "grad_norm": 0.6264842748641968,
+ "learning_rate": 6.320855650026902e-06,
+ "loss": 1.3807,
+ "step": 2656
+ },
+ {
+ "epoch": 2.657747491481979,
+ "grad_norm": 0.6113883256912231,
+ "learning_rate": 6.284230235630828e-06,
+ "loss": 1.255,
+ "step": 2657
+ },
+ {
+ "epoch": 2.658747772811103,
+ "grad_norm": 0.6346296072006226,
+ "learning_rate": 6.247707797917257e-06,
+ "loss": 1.2995,
+ "step": 2658
+ },
+ {
+ "epoch": 2.659748054140227,
+ "grad_norm": 0.6059311032295227,
+ "learning_rate": 6.2112883770177545e-06,
+ "loss": 1.1414,
+ "step": 2659
+ },
+ {
+ "epoch": 2.660748335469351,
+ "grad_norm": 0.6291425824165344,
+ "learning_rate": 6.174972012950642e-06,
+ "loss": 1.3904,
+ "step": 2660
+ },
+ {
+ "epoch": 2.6617486167984747,
+ "grad_norm": 0.6590014100074768,
+ "learning_rate": 6.138758745621087e-06,
+ "loss": 1.3498,
+ "step": 2661
+ },
+ {
+ "epoch": 2.6627488981275986,
+ "grad_norm": 0.793444812297821,
+ "learning_rate": 6.102648614820861e-06,
+ "loss": 1.1894,
+ "step": 2662
+ },
+ {
+ "epoch": 2.663749179456722,
+ "grad_norm": 0.5633147358894348,
+ "learning_rate": 6.066641660228522e-06,
+ "loss": 1.2249,
+ "step": 2663
+ },
+ {
+ "epoch": 2.664749460785846,
+ "grad_norm": 0.711222231388092,
+ "learning_rate": 6.030737921409169e-06,
+ "loss": 1.3263,
+ "step": 2664
+ },
+ {
+ "epoch": 2.66574974211497,
+ "grad_norm": 0.6414635181427002,
+ "learning_rate": 5.994937437814518e-06,
+ "loss": 1.2656,
+ "step": 2665
+ },
+ {
+ "epoch": 2.6667500234440937,
+ "grad_norm": 0.6593098044395447,
+ "learning_rate": 5.95924024878286e-06,
+ "loss": 1.3774,
+ "step": 2666
+ },
+ {
+ "epoch": 2.6677503047732176,
+ "grad_norm": 0.9681949019432068,
+ "learning_rate": 5.9236463935389065e-06,
+ "loss": 1.2932,
+ "step": 2667
+ },
+ {
+ "epoch": 2.668750586102341,
+ "grad_norm": 0.6746291518211365,
+ "learning_rate": 5.888155911193893e-06,
+ "loss": 1.1149,
+ "step": 2668
+ },
+ {
+ "epoch": 2.669750867431465,
+ "grad_norm": 0.6293673515319824,
+ "learning_rate": 5.852768840745426e-06,
+ "loss": 1.1611,
+ "step": 2669
+ },
+ {
+ "epoch": 2.670751148760589,
+ "grad_norm": 0.6567217707633972,
+ "learning_rate": 5.817485221077523e-06,
+ "loss": 1.3288,
+ "step": 2670
+ },
+ {
+ "epoch": 2.6717514300897127,
+ "grad_norm": 0.648614764213562,
+ "learning_rate": 5.78230509096046e-06,
+ "loss": 1.3098,
+ "step": 2671
+ },
+ {
+ "epoch": 2.6727517114188366,
+ "grad_norm": 0.7265651226043701,
+ "learning_rate": 5.747228489050871e-06,
+ "loss": 1.1701,
+ "step": 2672
+ },
+ {
+ "epoch": 2.6737519927479605,
+ "grad_norm": 0.748150110244751,
+ "learning_rate": 5.71225545389158e-06,
+ "loss": 1.1506,
+ "step": 2673
+ },
+ {
+ "epoch": 2.6747522740770844,
+ "grad_norm": 0.5729747414588928,
+ "learning_rate": 5.677386023911646e-06,
+ "loss": 1.2625,
+ "step": 2674
+ },
+ {
+ "epoch": 2.6757525554062083,
+ "grad_norm": 0.6485134959220886,
+ "learning_rate": 5.6426202374262435e-06,
+ "loss": 1.2279,
+ "step": 2675
+ },
+ {
+ "epoch": 2.6767528367353317,
+ "grad_norm": 0.6100039482116699,
+ "learning_rate": 5.607958132636715e-06,
+ "loss": 1.0707,
+ "step": 2676
+ },
+ {
+ "epoch": 2.6777531180644556,
+ "grad_norm": 0.645415186882019,
+ "learning_rate": 5.573399747630403e-06,
+ "loss": 1.3327,
+ "step": 2677
+ },
+ {
+ "epoch": 2.6787533993935795,
+ "grad_norm": 0.6253712773323059,
+ "learning_rate": 5.538945120380767e-06,
+ "loss": 1.2773,
+ "step": 2678
+ },
+ {
+ "epoch": 2.6797536807227034,
+ "grad_norm": 0.6479336023330688,
+ "learning_rate": 5.504594288747189e-06,
+ "loss": 1.4397,
+ "step": 2679
+ },
+ {
+ "epoch": 2.680753962051827,
+ "grad_norm": 0.7058972716331482,
+ "learning_rate": 5.470347290474987e-06,
+ "loss": 1.2505,
+ "step": 2680
+ },
+ {
+ "epoch": 2.6817542433809507,
+ "grad_norm": 0.6058101654052734,
+ "learning_rate": 5.43620416319548e-06,
+ "loss": 1.0942,
+ "step": 2681
+ },
+ {
+ "epoch": 2.6827545247100746,
+ "grad_norm": 0.677739679813385,
+ "learning_rate": 5.402164944425758e-06,
+ "loss": 1.2213,
+ "step": 2682
+ },
+ {
+ "epoch": 2.6837548060391985,
+ "grad_norm": 0.5680402517318726,
+ "learning_rate": 5.36822967156878e-06,
+ "loss": 1.2916,
+ "step": 2683
+ },
+ {
+ "epoch": 2.6847550873683224,
+ "grad_norm": 0.5980328917503357,
+ "learning_rate": 5.334398381913252e-06,
+ "loss": 1.3965,
+ "step": 2684
+ },
+ {
+ "epoch": 2.6857553686974462,
+ "grad_norm": 0.6705754399299622,
+ "learning_rate": 5.300671112633671e-06,
+ "loss": 1.312,
+ "step": 2685
+ },
+ {
+ "epoch": 2.68675565002657,
+ "grad_norm": 0.6935957670211792,
+ "learning_rate": 5.267047900790201e-06,
+ "loss": 1.2965,
+ "step": 2686
+ },
+ {
+ "epoch": 2.687755931355694,
+ "grad_norm": 0.661569356918335,
+ "learning_rate": 5.233528783328634e-06,
+ "loss": 1.4045,
+ "step": 2687
+ },
+ {
+ "epoch": 2.6887562126848175,
+ "grad_norm": 0.577421247959137,
+ "learning_rate": 5.200113797080463e-06,
+ "loss": 1.2221,
+ "step": 2688
+ },
+ {
+ "epoch": 2.6897564940139413,
+ "grad_norm": 0.6115522384643555,
+ "learning_rate": 5.166802978762697e-06,
+ "loss": 1.3628,
+ "step": 2689
+ },
+ {
+ "epoch": 2.6907567753430652,
+ "grad_norm": 0.6134511828422546,
+ "learning_rate": 5.1335963649779e-06,
+ "loss": 1.388,
+ "step": 2690
+ },
+ {
+ "epoch": 2.691757056672189,
+ "grad_norm": 0.5624033212661743,
+ "learning_rate": 5.100493992214128e-06,
+ "loss": 1.1799,
+ "step": 2691
+ },
+ {
+ "epoch": 2.692757338001313,
+ "grad_norm": 0.561679482460022,
+ "learning_rate": 5.067495896844931e-06,
+ "loss": 1.1809,
+ "step": 2692
+ },
+ {
+ "epoch": 2.6937576193304364,
+ "grad_norm": 0.6090604662895203,
+ "learning_rate": 5.034602115129206e-06,
+ "loss": 1.0215,
+ "step": 2693
+ },
+ {
+ "epoch": 2.6947579006595603,
+ "grad_norm": 0.7113422751426697,
+ "learning_rate": 5.001812683211305e-06,
+ "loss": 1.2406,
+ "step": 2694
+ },
+ {
+ "epoch": 2.695758181988684,
+ "grad_norm": 0.6245151162147522,
+ "learning_rate": 4.969127637120863e-06,
+ "loss": 1.2332,
+ "step": 2695
+ },
+ {
+ "epoch": 2.696758463317808,
+ "grad_norm": 0.6761287450790405,
+ "learning_rate": 4.9365470127728404e-06,
+ "loss": 1.2538,
+ "step": 2696
+ },
+ {
+ "epoch": 2.697758744646932,
+ "grad_norm": 0.548460841178894,
+ "learning_rate": 4.904070845967468e-06,
+ "loss": 1.1693,
+ "step": 2697
+ },
+ {
+ "epoch": 2.698759025976056,
+ "grad_norm": 0.6992124915122986,
+ "learning_rate": 4.871699172390154e-06,
+ "loss": 1.411,
+ "step": 2698
+ },
+ {
+ "epoch": 2.6997593073051798,
+ "grad_norm": 0.6379924416542053,
+ "learning_rate": 4.839432027611535e-06,
+ "loss": 1.2836,
+ "step": 2699
+ },
+ {
+ "epoch": 2.7007595886343037,
+ "grad_norm": 0.6094527244567871,
+ "learning_rate": 4.807269447087348e-06,
+ "loss": 1.309,
+ "step": 2700
+ },
+ {
+ "epoch": 2.701759869963427,
+ "grad_norm": 0.8103051781654358,
+ "learning_rate": 4.775211466158469e-06,
+ "loss": 1.3922,
+ "step": 2701
+ },
+ {
+ "epoch": 2.702760151292551,
+ "grad_norm": 0.7398792505264282,
+ "learning_rate": 4.743258120050809e-06,
+ "loss": 1.1487,
+ "step": 2702
+ },
+ {
+ "epoch": 2.703760432621675,
+ "grad_norm": 0.5536444187164307,
+ "learning_rate": 4.7114094438753255e-06,
+ "loss": 1.151,
+ "step": 2703
+ },
+ {
+ "epoch": 2.7047607139507988,
+ "grad_norm": 0.5996678471565247,
+ "learning_rate": 4.679665472627947e-06,
+ "loss": 1.0076,
+ "step": 2704
+ },
+ {
+ "epoch": 2.7057609952799226,
+ "grad_norm": 0.6564775109291077,
+ "learning_rate": 4.648026241189563e-06,
+ "loss": 1.2467,
+ "step": 2705
+ },
+ {
+ "epoch": 2.706761276609046,
+ "grad_norm": 0.6396240592002869,
+ "learning_rate": 4.616491784325972e-06,
+ "loss": 1.2985,
+ "step": 2706
+ },
+ {
+ "epoch": 2.70776155793817,
+ "grad_norm": 0.594692587852478,
+ "learning_rate": 4.585062136687812e-06,
+ "loss": 1.3143,
+ "step": 2707
+ },
+ {
+ "epoch": 2.708761839267294,
+ "grad_norm": 0.6686044335365295,
+ "learning_rate": 4.553737332810614e-06,
+ "loss": 1.3073,
+ "step": 2708
+ },
+ {
+ "epoch": 2.7097621205964177,
+ "grad_norm": 0.8201797604560852,
+ "learning_rate": 4.5225174071146455e-06,
+ "loss": 1.4223,
+ "step": 2709
+ },
+ {
+ "epoch": 2.7107624019255416,
+ "grad_norm": 0.7238805890083313,
+ "learning_rate": 4.4914023939049755e-06,
+ "loss": 1.3119,
+ "step": 2710
+ },
+ {
+ "epoch": 2.7117626832546655,
+ "grad_norm": 0.6857038736343384,
+ "learning_rate": 4.460392327371377e-06,
+ "loss": 1.3138,
+ "step": 2711
+ },
+ {
+ "epoch": 2.7127629645837894,
+ "grad_norm": 0.6083630323410034,
+ "learning_rate": 4.429487241588304e-06,
+ "loss": 1.2514,
+ "step": 2712
+ },
+ {
+ "epoch": 2.713763245912913,
+ "grad_norm": 0.8787251114845276,
+ "learning_rate": 4.398687170514859e-06,
+ "loss": 1.4092,
+ "step": 2713
+ },
+ {
+ "epoch": 2.7147635272420367,
+ "grad_norm": 0.7057076096534729,
+ "learning_rate": 4.367992147994738e-06,
+ "loss": 1.2285,
+ "step": 2714
+ },
+ {
+ "epoch": 2.7157638085711606,
+ "grad_norm": 0.6430444121360779,
+ "learning_rate": 4.337402207756236e-06,
+ "loss": 1.4352,
+ "step": 2715
+ },
+ {
+ "epoch": 2.7167640899002845,
+ "grad_norm": 0.5882517099380493,
+ "learning_rate": 4.306917383412134e-06,
+ "loss": 1.2691,
+ "step": 2716
+ },
+ {
+ "epoch": 2.7177643712294084,
+ "grad_norm": 0.7088742852210999,
+ "learning_rate": 4.276537708459783e-06,
+ "loss": 1.2782,
+ "step": 2717
+ },
+ {
+ "epoch": 2.718764652558532,
+ "grad_norm": 0.6109272837638855,
+ "learning_rate": 4.24626321628091e-06,
+ "loss": 1.2888,
+ "step": 2718
+ },
+ {
+ "epoch": 2.7197649338876557,
+ "grad_norm": 0.558168351650238,
+ "learning_rate": 4.2160939401417524e-06,
+ "loss": 1.1591,
+ "step": 2719
+ },
+ {
+ "epoch": 2.7207652152167796,
+ "grad_norm": 0.6718723177909851,
+ "learning_rate": 4.186029913192846e-06,
+ "loss": 1.3608,
+ "step": 2720
+ },
+ {
+ "epoch": 2.7217654965459035,
+ "grad_norm": 0.6965057849884033,
+ "learning_rate": 4.156071168469145e-06,
+ "loss": 1.1904,
+ "step": 2721
+ },
+ {
+ "epoch": 2.7227657778750274,
+ "grad_norm": 0.6064279079437256,
+ "learning_rate": 4.12621773888987e-06,
+ "loss": 1.1858,
+ "step": 2722
+ },
+ {
+ "epoch": 2.7237660592041513,
+ "grad_norm": 0.665644645690918,
+ "learning_rate": 4.096469657258573e-06,
+ "loss": 1.3999,
+ "step": 2723
+ },
+ {
+ "epoch": 2.724766340533275,
+ "grad_norm": 0.6393682956695557,
+ "learning_rate": 4.066826956262981e-06,
+ "loss": 1.2492,
+ "step": 2724
+ },
+ {
+ "epoch": 2.725766621862399,
+ "grad_norm": 0.6236796975135803,
+ "learning_rate": 4.037289668475086e-06,
+ "loss": 1.2377,
+ "step": 2725
+ },
+ {
+ "epoch": 2.7267669031915225,
+ "grad_norm": 0.641946017742157,
+ "learning_rate": 4.007857826351024e-06,
+ "loss": 1.4551,
+ "step": 2726
+ },
+ {
+ "epoch": 2.7277671845206464,
+ "grad_norm": 0.6818437576293945,
+ "learning_rate": 3.9785314622310495e-06,
+ "loss": 1.2524,
+ "step": 2727
+ },
+ {
+ "epoch": 2.7287674658497703,
+ "grad_norm": 0.5964662432670593,
+ "learning_rate": 3.949310608339552e-06,
+ "loss": 1.1441,
+ "step": 2728
+ },
+ {
+ "epoch": 2.729767747178894,
+ "grad_norm": 0.5980690717697144,
+ "learning_rate": 3.9201952967849565e-06,
+ "loss": 1.1601,
+ "step": 2729
+ },
+ {
+ "epoch": 2.730768028508018,
+ "grad_norm": 0.6172769665718079,
+ "learning_rate": 3.8911855595597295e-06,
+ "loss": 1.1752,
+ "step": 2730
+ },
+ {
+ "epoch": 2.7317683098371415,
+ "grad_norm": 0.6396147608757019,
+ "learning_rate": 3.862281428540315e-06,
+ "loss": 1.2828,
+ "step": 2731
+ },
+ {
+ "epoch": 2.7327685911662654,
+ "grad_norm": 0.6444851160049438,
+ "learning_rate": 3.8334829354871315e-06,
+ "loss": 1.2791,
+ "step": 2732
+ },
+ {
+ "epoch": 2.7337688724953892,
+ "grad_norm": 0.698672354221344,
+ "learning_rate": 3.8047901120445316e-06,
+ "loss": 1.1439,
+ "step": 2733
+ },
+ {
+ "epoch": 2.734769153824513,
+ "grad_norm": 0.6475120782852173,
+ "learning_rate": 3.776202989740707e-06,
+ "loss": 1.2733,
+ "step": 2734
+ },
+ {
+ "epoch": 2.735769435153637,
+ "grad_norm": 0.6438625454902649,
+ "learning_rate": 3.7477215999877635e-06,
+ "loss": 1.3872,
+ "step": 2735
+ },
+ {
+ "epoch": 2.736769716482761,
+ "grad_norm": 0.673586368560791,
+ "learning_rate": 3.7193459740815674e-06,
+ "loss": 1.2358,
+ "step": 2736
+ },
+ {
+ "epoch": 2.737769997811885,
+ "grad_norm": 0.6692418456077576,
+ "learning_rate": 3.6910761432018328e-06,
+ "loss": 1.4557,
+ "step": 2737
+ },
+ {
+ "epoch": 2.7387702791410087,
+ "grad_norm": 0.5957900285720825,
+ "learning_rate": 3.662912138411967e-06,
+ "loss": 1.1262,
+ "step": 2738
+ },
+ {
+ "epoch": 2.739770560470132,
+ "grad_norm": 0.5733324885368347,
+ "learning_rate": 3.634853990659126e-06,
+ "loss": 1.1136,
+ "step": 2739
+ },
+ {
+ "epoch": 2.740770841799256,
+ "grad_norm": 0.5956900715827942,
+ "learning_rate": 3.606901730774159e-06,
+ "loss": 1.2082,
+ "step": 2740
+ },
+ {
+ "epoch": 2.74177112312838,
+ "grad_norm": 0.6367376446723938,
+ "learning_rate": 3.5790553894715085e-06,
+ "loss": 1.2172,
+ "step": 2741
+ },
+ {
+ "epoch": 2.742771404457504,
+ "grad_norm": 0.6215959787368774,
+ "learning_rate": 3.5513149973492976e-06,
+ "loss": 1.2861,
+ "step": 2742
+ },
+ {
+ "epoch": 2.7437716857866272,
+ "grad_norm": 0.6744598746299744,
+ "learning_rate": 3.5236805848891886e-06,
+ "loss": 1.3391,
+ "step": 2743
+ },
+ {
+ "epoch": 2.744771967115751,
+ "grad_norm": 0.6197530627250671,
+ "learning_rate": 3.4961521824564026e-06,
+ "loss": 1.098,
+ "step": 2744
+ },
+ {
+ "epoch": 2.745772248444875,
+ "grad_norm": 0.7133039832115173,
+ "learning_rate": 3.4687298202996655e-06,
+ "loss": 1.0635,
+ "step": 2745
+ },
+ {
+ "epoch": 2.746772529773999,
+ "grad_norm": 0.5788673162460327,
+ "learning_rate": 3.4414135285512183e-06,
+ "loss": 1.1141,
+ "step": 2746
+ },
+ {
+ "epoch": 2.7477728111031228,
+ "grad_norm": 0.5817285776138306,
+ "learning_rate": 3.4142033372266957e-06,
+ "loss": 1.1674,
+ "step": 2747
+ },
+ {
+ "epoch": 2.7487730924322467,
+ "grad_norm": 0.6814209222793579,
+ "learning_rate": 3.3870992762252143e-06,
+ "loss": 1.2865,
+ "step": 2748
+ },
+ {
+ "epoch": 2.7497733737613705,
+ "grad_norm": 0.6004665493965149,
+ "learning_rate": 3.3601013753291945e-06,
+ "loss": 0.9818,
+ "step": 2749
+ },
+ {
+ "epoch": 2.7507736550904944,
+ "grad_norm": 0.6021395325660706,
+ "learning_rate": 3.333209664204473e-06,
+ "loss": 1.3455,
+ "step": 2750
+ },
+ {
+ "epoch": 2.751773936419618,
+ "grad_norm": 0.7030460834503174,
+ "learning_rate": 3.3064241724001797e-06,
+ "loss": 1.1192,
+ "step": 2751
+ },
+ {
+ "epoch": 2.7527742177487418,
+ "grad_norm": 2.5236783027648926,
+ "learning_rate": 3.2797449293487048e-06,
+ "loss": 1.2202,
+ "step": 2752
+ },
+ {
+ "epoch": 2.7537744990778656,
+ "grad_norm": 0.622873067855835,
+ "learning_rate": 3.253171964365731e-06,
+ "loss": 1.2149,
+ "step": 2753
+ },
+ {
+ "epoch": 2.7547747804069895,
+ "grad_norm": 0.7031694054603577,
+ "learning_rate": 3.226705306650113e-06,
+ "loss": 1.3004,
+ "step": 2754
+ },
+ {
+ "epoch": 2.7557750617361134,
+ "grad_norm": 0.6384484767913818,
+ "learning_rate": 3.200344985283965e-06,
+ "loss": 0.9886,
+ "step": 2755
+ },
+ {
+ "epoch": 2.756775343065237,
+ "grad_norm": 0.6320153474807739,
+ "learning_rate": 3.174091029232473e-06,
+ "loss": 1.3309,
+ "step": 2756
+ },
+ {
+ "epoch": 2.7577756243943607,
+ "grad_norm": 0.9083541631698608,
+ "learning_rate": 3.1479434673440167e-06,
+ "loss": 1.1482,
+ "step": 2757
+ },
+ {
+ "epoch": 2.7587759057234846,
+ "grad_norm": 0.6994872689247131,
+ "learning_rate": 3.1219023283500238e-06,
+ "loss": 1.2869,
+ "step": 2758
+ },
+ {
+ "epoch": 2.7597761870526085,
+ "grad_norm": 0.5944600701332092,
+ "learning_rate": 3.095967640864983e-06,
+ "loss": 1.1317,
+ "step": 2759
+ },
+ {
+ "epoch": 2.7607764683817324,
+ "grad_norm": 0.6451122760772705,
+ "learning_rate": 3.070139433386454e-06,
+ "loss": 1.117,
+ "step": 2760
+ },
+ {
+ "epoch": 2.7617767497108563,
+ "grad_norm": 0.6910557746887207,
+ "learning_rate": 3.0444177342949464e-06,
+ "loss": 1.2522,
+ "step": 2761
+ },
+ {
+ "epoch": 2.76277703103998,
+ "grad_norm": 0.6909431219100952,
+ "learning_rate": 3.0188025718539624e-06,
+ "loss": 1.2434,
+ "step": 2762
+ },
+ {
+ "epoch": 2.763777312369104,
+ "grad_norm": 0.6212828159332275,
+ "learning_rate": 2.9932939742099208e-06,
+ "loss": 1.289,
+ "step": 2763
+ },
+ {
+ "epoch": 2.7647775936982275,
+ "grad_norm": 0.6849749088287354,
+ "learning_rate": 2.9678919693921894e-06,
+ "loss": 1.2224,
+ "step": 2764
+ },
+ {
+ "epoch": 2.7657778750273514,
+ "grad_norm": 0.5868197083473206,
+ "learning_rate": 2.942596585312929e-06,
+ "loss": 1.204,
+ "step": 2765
+ },
+ {
+ "epoch": 2.7667781563564753,
+ "grad_norm": 0.6371617913246155,
+ "learning_rate": 2.9174078497672397e-06,
+ "loss": 1.4273,
+ "step": 2766
+ },
+ {
+ "epoch": 2.767778437685599,
+ "grad_norm": 0.5671185255050659,
+ "learning_rate": 2.892325790432948e-06,
+ "loss": 1.1148,
+ "step": 2767
+ },
+ {
+ "epoch": 2.768778719014723,
+ "grad_norm": 0.66547691822052,
+ "learning_rate": 2.8673504348707412e-06,
+ "loss": 1.3317,
+ "step": 2768
+ },
+ {
+ "epoch": 2.7697790003438465,
+ "grad_norm": 0.6540102362632751,
+ "learning_rate": 2.842481810523978e-06,
+ "loss": 1.4247,
+ "step": 2769
+ },
+ {
+ "epoch": 2.7707792816729704,
+ "grad_norm": 0.6490374803543091,
+ "learning_rate": 2.8177199447187994e-06,
+ "loss": 1.234,
+ "step": 2770
+ },
+ {
+ "epoch": 2.7717795630020943,
+ "grad_norm": 0.5828781127929688,
+ "learning_rate": 2.7930648646640188e-06,
+ "loss": 1.3629,
+ "step": 2771
+ },
+ {
+ "epoch": 2.772779844331218,
+ "grad_norm": 0.6953780055046082,
+ "learning_rate": 2.7685165974510986e-06,
+ "loss": 1.2427,
+ "step": 2772
+ },
+ {
+ "epoch": 2.773780125660342,
+ "grad_norm": 0.7044670581817627,
+ "learning_rate": 2.744075170054161e-06,
+ "loss": 1.0625,
+ "step": 2773
+ },
+ {
+ "epoch": 2.774780406989466,
+ "grad_norm": 0.6625652313232422,
+ "learning_rate": 2.7197406093299018e-06,
+ "loss": 1.1655,
+ "step": 2774
+ },
+ {
+ "epoch": 2.77578068831859,
+ "grad_norm": 0.7223336696624756,
+ "learning_rate": 2.6955129420176196e-06,
+ "loss": 1.335,
+ "step": 2775
+ },
+ {
+ "epoch": 2.7767809696477133,
+ "grad_norm": 0.6766147017478943,
+ "learning_rate": 2.6713921947391085e-06,
+ "loss": 1.3142,
+ "step": 2776
+ },
+ {
+ "epoch": 2.777781250976837,
+ "grad_norm": 0.6236035227775574,
+ "learning_rate": 2.647378393998745e-06,
+ "loss": 1.2358,
+ "step": 2777
+ },
+ {
+ "epoch": 2.778781532305961,
+ "grad_norm": 0.6155989766120911,
+ "learning_rate": 2.623471566183322e-06,
+ "loss": 1.0893,
+ "step": 2778
+ },
+ {
+ "epoch": 2.779781813635085,
+ "grad_norm": 0.5907763242721558,
+ "learning_rate": 2.599671737562137e-06,
+ "loss": 1.3231,
+ "step": 2779
+ },
+ {
+ "epoch": 2.780782094964209,
+ "grad_norm": 0.639797031879425,
+ "learning_rate": 2.5759789342868935e-06,
+ "loss": 1.3483,
+ "step": 2780
+ },
+ {
+ "epoch": 2.7817823762933322,
+ "grad_norm": 0.5395955443382263,
+ "learning_rate": 2.552393182391677e-06,
+ "loss": 1.2079,
+ "step": 2781
+ },
+ {
+ "epoch": 2.782782657622456,
+ "grad_norm": 0.5336273312568665,
+ "learning_rate": 2.528914507793001e-06,
+ "loss": 1.1289,
+ "step": 2782
+ },
+ {
+ "epoch": 2.78378293895158,
+ "grad_norm": 0.6160317659378052,
+ "learning_rate": 2.505542936289651e-06,
+ "loss": 1.2678,
+ "step": 2783
+ },
+ {
+ "epoch": 2.784783220280704,
+ "grad_norm": 0.5914292931556702,
+ "learning_rate": 2.482278493562784e-06,
+ "loss": 1.2231,
+ "step": 2784
+ },
+ {
+ "epoch": 2.785783501609828,
+ "grad_norm": 0.6742945909500122,
+ "learning_rate": 2.4591212051757962e-06,
+ "loss": 1.2062,
+ "step": 2785
+ },
+ {
+ "epoch": 2.7867837829389517,
+ "grad_norm": 0.6824519634246826,
+ "learning_rate": 2.436071096574366e-06,
+ "loss": 1.4535,
+ "step": 2786
+ },
+ {
+ "epoch": 2.7877840642680756,
+ "grad_norm": 0.6473540663719177,
+ "learning_rate": 2.4131281930864002e-06,
+ "loss": 1.2684,
+ "step": 2787
+ },
+ {
+ "epoch": 2.7887843455971995,
+ "grad_norm": 0.6114115118980408,
+ "learning_rate": 2.390292519921977e-06,
+ "loss": 1.4267,
+ "step": 2788
+ },
+ {
+ "epoch": 2.789784626926323,
+ "grad_norm": 0.61500483751297,
+ "learning_rate": 2.367564102173403e-06,
+ "loss": 1.1804,
+ "step": 2789
+ },
+ {
+ "epoch": 2.790784908255447,
+ "grad_norm": 0.6327043175697327,
+ "learning_rate": 2.3449429648150665e-06,
+ "loss": 1.0484,
+ "step": 2790
+ },
+ {
+ "epoch": 2.7917851895845707,
+ "grad_norm": 0.6302153468132019,
+ "learning_rate": 2.3224291327035407e-06,
+ "loss": 1.314,
+ "step": 2791
+ },
+ {
+ "epoch": 2.7927854709136946,
+ "grad_norm": 0.7265939116477966,
+ "learning_rate": 2.3000226305774255e-06,
+ "loss": 1.3758,
+ "step": 2792
+ },
+ {
+ "epoch": 2.7937857522428184,
+ "grad_norm": 0.6297316551208496,
+ "learning_rate": 2.277723483057448e-06,
+ "loss": 1.2193,
+ "step": 2793
+ },
+ {
+ "epoch": 2.794786033571942,
+ "grad_norm": 0.7096316814422607,
+ "learning_rate": 2.2555317146462975e-06,
+ "loss": 1.2396,
+ "step": 2794
+ },
+ {
+ "epoch": 2.7957863149010658,
+ "grad_norm": 0.7071268558502197,
+ "learning_rate": 2.2334473497287454e-06,
+ "loss": 1.3833,
+ "step": 2795
+ },
+ {
+ "epoch": 2.7967865962301897,
+ "grad_norm": 0.6459008455276489,
+ "learning_rate": 2.2114704125714924e-06,
+ "loss": 1.0338,
+ "step": 2796
+ },
+ {
+ "epoch": 2.7977868775593135,
+ "grad_norm": 0.6362354755401611,
+ "learning_rate": 2.1896009273232433e-06,
+ "loss": 1.2881,
+ "step": 2797
+ },
+ {
+ "epoch": 2.7987871588884374,
+ "grad_norm": 0.7056740522384644,
+ "learning_rate": 2.1678389180145865e-06,
+ "loss": 1.3902,
+ "step": 2798
+ },
+ {
+ "epoch": 2.7997874402175613,
+ "grad_norm": 0.6031796932220459,
+ "learning_rate": 2.1461844085580385e-06,
+ "loss": 1.0237,
+ "step": 2799
+ },
+ {
+ "epoch": 2.800787721546685,
+ "grad_norm": 0.8180059790611267,
+ "learning_rate": 2.124637422747999e-06,
+ "loss": 1.2157,
+ "step": 2800
+ },
+ {
+ "epoch": 2.801788002875809,
+ "grad_norm": 0.6587545275688171,
+ "learning_rate": 2.1031979842606853e-06,
+ "loss": 1.1683,
+ "step": 2801
+ },
+ {
+ "epoch": 2.8027882842049325,
+ "grad_norm": 0.6571118831634521,
+ "learning_rate": 2.0818661166542074e-06,
+ "loss": 1.1751,
+ "step": 2802
+ },
+ {
+ "epoch": 2.8037885655340564,
+ "grad_norm": 0.6493355631828308,
+ "learning_rate": 2.0606418433683828e-06,
+ "loss": 1.1102,
+ "step": 2803
+ },
+ {
+ "epoch": 2.8047888468631803,
+ "grad_norm": 0.6424496173858643,
+ "learning_rate": 2.0395251877248778e-06,
+ "loss": 1.1402,
+ "step": 2804
+ },
+ {
+ "epoch": 2.805789128192304,
+ "grad_norm": 0.6075500249862671,
+ "learning_rate": 2.0185161729270653e-06,
+ "loss": 1.3684,
+ "step": 2805
+ },
+ {
+ "epoch": 2.8067894095214276,
+ "grad_norm": 0.972443699836731,
+ "learning_rate": 1.9976148220600457e-06,
+ "loss": 1.3141,
+ "step": 2806
+ },
+ {
+ "epoch": 2.8077896908505515,
+ "grad_norm": 0.5574498772621155,
+ "learning_rate": 1.976821158090647e-06,
+ "loss": 1.2447,
+ "step": 2807
+ },
+ {
+ "epoch": 2.8087899721796754,
+ "grad_norm": 0.720353364944458,
+ "learning_rate": 1.9561352038673263e-06,
+ "loss": 1.2412,
+ "step": 2808
+ },
+ {
+ "epoch": 2.8097902535087993,
+ "grad_norm": 0.7458455562591553,
+ "learning_rate": 1.9355569821202234e-06,
+ "loss": 1.2114,
+ "step": 2809
+ },
+ {
+ "epoch": 2.810790534837923,
+ "grad_norm": 0.6584808230400085,
+ "learning_rate": 1.915086515461062e-06,
+ "loss": 1.2203,
+ "step": 2810
+ },
+ {
+ "epoch": 2.811790816167047,
+ "grad_norm": 0.6349656581878662,
+ "learning_rate": 1.8947238263832046e-06,
+ "loss": 1.1031,
+ "step": 2811
+ },
+ {
+ "epoch": 2.812791097496171,
+ "grad_norm": 0.6814787983894348,
+ "learning_rate": 1.874468937261531e-06,
+ "loss": 1.4014,
+ "step": 2812
+ },
+ {
+ "epoch": 2.813791378825295,
+ "grad_norm": 0.6562025547027588,
+ "learning_rate": 1.8543218703525378e-06,
+ "loss": 1.2268,
+ "step": 2813
+ },
+ {
+ "epoch": 2.8147916601544183,
+ "grad_norm": 0.69455885887146,
+ "learning_rate": 1.8342826477941944e-06,
+ "loss": 1.4468,
+ "step": 2814
+ },
+ {
+ "epoch": 2.815791941483542,
+ "grad_norm": 0.691709578037262,
+ "learning_rate": 1.8143512916059646e-06,
+ "loss": 1.2449,
+ "step": 2815
+ },
+ {
+ "epoch": 2.816792222812666,
+ "grad_norm": 0.6932084560394287,
+ "learning_rate": 1.7945278236888408e-06,
+ "loss": 1.3314,
+ "step": 2816
+ },
+ {
+ "epoch": 2.81779250414179,
+ "grad_norm": 0.6960920691490173,
+ "learning_rate": 1.7748122658251876e-06,
+ "loss": 1.3686,
+ "step": 2817
+ },
+ {
+ "epoch": 2.818792785470914,
+ "grad_norm": 0.5477668046951294,
+ "learning_rate": 1.7552046396788757e-06,
+ "loss": 1.2019,
+ "step": 2818
+ },
+ {
+ "epoch": 2.8197930668000373,
+ "grad_norm": 0.6102869510650635,
+ "learning_rate": 1.7357049667951043e-06,
+ "loss": 1.2675,
+ "step": 2819
+ },
+ {
+ "epoch": 2.820793348129161,
+ "grad_norm": 0.5731253027915955,
+ "learning_rate": 1.7163132686005223e-06,
+ "loss": 1.324,
+ "step": 2820
+ },
+ {
+ "epoch": 2.821793629458285,
+ "grad_norm": 0.6474632620811462,
+ "learning_rate": 1.6970295664030745e-06,
+ "loss": 1.1748,
+ "step": 2821
+ },
+ {
+ "epoch": 2.822793910787409,
+ "grad_norm": 0.603455126285553,
+ "learning_rate": 1.6778538813920775e-06,
+ "loss": 1.3523,
+ "step": 2822
+ },
+ {
+ "epoch": 2.823794192116533,
+ "grad_norm": 0.558164119720459,
+ "learning_rate": 1.6587862346381321e-06,
+ "loss": 1.2071,
+ "step": 2823
+ },
+ {
+ "epoch": 2.8247944734456567,
+ "grad_norm": 0.5928383469581604,
+ "learning_rate": 1.6398266470931344e-06,
+ "loss": 1.1583,
+ "step": 2824
+ },
+ {
+ "epoch": 2.8257947547747806,
+ "grad_norm": 0.65920090675354,
+ "learning_rate": 1.6209751395902417e-06,
+ "loss": 1.5207,
+ "step": 2825
+ },
+ {
+ "epoch": 2.8267950361039045,
+ "grad_norm": 0.5846977233886719,
+ "learning_rate": 1.6022317328438506e-06,
+ "loss": 1.2424,
+ "step": 2826
+ },
+ {
+ "epoch": 2.827795317433028,
+ "grad_norm": 0.7116659283638,
+ "learning_rate": 1.5835964474495868e-06,
+ "loss": 1.4457,
+ "step": 2827
+ },
+ {
+ "epoch": 2.828795598762152,
+ "grad_norm": 0.6061758995056152,
+ "learning_rate": 1.5650693038842367e-06,
+ "loss": 1.2649,
+ "step": 2828
+ },
+ {
+ "epoch": 2.8297958800912757,
+ "grad_norm": 0.6263569593429565,
+ "learning_rate": 1.5466503225058048e-06,
+ "loss": 1.3504,
+ "step": 2829
+ },
+ {
+ "epoch": 2.8307961614203996,
+ "grad_norm": 0.6007553935050964,
+ "learning_rate": 1.5283395235534015e-06,
+ "loss": 1.3416,
+ "step": 2830
+ },
+ {
+ "epoch": 2.8317964427495235,
+ "grad_norm": 0.6499843597412109,
+ "learning_rate": 1.5101369271472988e-06,
+ "loss": 1.3743,
+ "step": 2831
+ },
+ {
+ "epoch": 2.832796724078647,
+ "grad_norm": 0.6581957340240479,
+ "learning_rate": 1.4920425532888526e-06,
+ "loss": 1.2113,
+ "step": 2832
+ },
+ {
+ "epoch": 2.833797005407771,
+ "grad_norm": 0.6455169320106506,
+ "learning_rate": 1.4740564218605035e-06,
+ "loss": 1.1763,
+ "step": 2833
+ },
+ {
+ "epoch": 2.8347972867368947,
+ "grad_norm": 0.55152827501297,
+ "learning_rate": 1.4561785526257533e-06,
+ "loss": 1.2854,
+ "step": 2834
+ },
+ {
+ "epoch": 2.8357975680660186,
+ "grad_norm": 0.6238963603973389,
+ "learning_rate": 1.4384089652291543e-06,
+ "loss": 1.281,
+ "step": 2835
+ },
+ {
+ "epoch": 2.8367978493951425,
+ "grad_norm": 0.5785467028617859,
+ "learning_rate": 1.420747679196277e-06,
+ "loss": 1.2338,
+ "step": 2836
+ },
+ {
+ "epoch": 2.8377981307242663,
+ "grad_norm": 0.6000106930732727,
+ "learning_rate": 1.4031947139336643e-06,
+ "loss": 1.0418,
+ "step": 2837
+ },
+ {
+ "epoch": 2.8387984120533902,
+ "grad_norm": 0.6103694438934326,
+ "learning_rate": 1.3857500887288544e-06,
+ "loss": 1.0706,
+ "step": 2838
+ },
+ {
+ "epoch": 2.8397986933825137,
+ "grad_norm": 0.6325921416282654,
+ "learning_rate": 1.3684138227503474e-06,
+ "loss": 1.2471,
+ "step": 2839
+ },
+ {
+ "epoch": 2.8407989747116376,
+ "grad_norm": 0.6974677443504333,
+ "learning_rate": 1.3511859350475497e-06,
+ "loss": 1.2671,
+ "step": 2840
+ },
+ {
+ "epoch": 2.8417992560407614,
+ "grad_norm": 0.7803261280059814,
+ "learning_rate": 1.3340664445507966e-06,
+ "loss": 1.2838,
+ "step": 2841
+ },
+ {
+ "epoch": 2.8427995373698853,
+ "grad_norm": 0.5813845992088318,
+ "learning_rate": 1.3170553700713294e-06,
+ "loss": 1.0119,
+ "step": 2842
+ },
+ {
+ "epoch": 2.843799818699009,
+ "grad_norm": 0.6487960815429688,
+ "learning_rate": 1.3001527303012184e-06,
+ "loss": 1.2867,
+ "step": 2843
+ },
+ {
+ "epoch": 2.8448001000281327,
+ "grad_norm": 0.6631714701652527,
+ "learning_rate": 1.2833585438134287e-06,
+ "loss": 1.1298,
+ "step": 2844
+ },
+ {
+ "epoch": 2.8458003813572565,
+ "grad_norm": 0.5860957503318787,
+ "learning_rate": 1.2666728290617213e-06,
+ "loss": 1.1824,
+ "step": 2845
+ },
+ {
+ "epoch": 2.8468006626863804,
+ "grad_norm": 0.6235078573226929,
+ "learning_rate": 1.2500956043806744e-06,
+ "loss": 1.1826,
+ "step": 2846
+ },
+ {
+ "epoch": 2.8478009440155043,
+ "grad_norm": 0.6575687527656555,
+ "learning_rate": 1.2336268879856727e-06,
+ "loss": 1.3503,
+ "step": 2847
+ },
+ {
+ "epoch": 2.848801225344628,
+ "grad_norm": 0.7656979560852051,
+ "learning_rate": 1.217266697972852e-06,
+ "loss": 1.4424,
+ "step": 2848
+ },
+ {
+ "epoch": 2.849801506673752,
+ "grad_norm": 0.6069021821022034,
+ "learning_rate": 1.201015052319099e-06,
+ "loss": 1.2036,
+ "step": 2849
+ },
+ {
+ "epoch": 2.850801788002876,
+ "grad_norm": 0.523091733455658,
+ "learning_rate": 1.1848719688820398e-06,
+ "loss": 1.0327,
+ "step": 2850
+ },
+ {
+ "epoch": 2.851802069332,
+ "grad_norm": 0.6629632711410522,
+ "learning_rate": 1.168837465400008e-06,
+ "loss": 1.4209,
+ "step": 2851
+ },
+ {
+ "epoch": 2.8528023506611233,
+ "grad_norm": 0.7719458937644958,
+ "learning_rate": 1.1529115594920092e-06,
+ "loss": 1.2403,
+ "step": 2852
+ },
+ {
+ "epoch": 2.853802631990247,
+ "grad_norm": 0.6386509537696838,
+ "learning_rate": 1.1370942686577347e-06,
+ "loss": 1.1935,
+ "step": 2853
+ },
+ {
+ "epoch": 2.854802913319371,
+ "grad_norm": 0.6315529942512512,
+ "learning_rate": 1.1213856102775366e-06,
+ "loss": 1.1605,
+ "step": 2854
+ },
+ {
+ "epoch": 2.855803194648495,
+ "grad_norm": 0.8970243334770203,
+ "learning_rate": 1.1057856016123858e-06,
+ "loss": 1.2303,
+ "step": 2855
+ },
+ {
+ "epoch": 2.856803475977619,
+ "grad_norm": 0.6587148308753967,
+ "learning_rate": 1.09029425980387e-06,
+ "loss": 1.4478,
+ "step": 2856
+ },
+ {
+ "epoch": 2.8578037573067423,
+ "grad_norm": 0.7532643675804138,
+ "learning_rate": 1.0749116018741623e-06,
+ "loss": 1.1733,
+ "step": 2857
+ },
+ {
+ "epoch": 2.858804038635866,
+ "grad_norm": 1.022985577583313,
+ "learning_rate": 1.0596376447260414e-06,
+ "loss": 1.147,
+ "step": 2858
+ },
+ {
+ "epoch": 2.85980431996499,
+ "grad_norm": 0.6655153632164001,
+ "learning_rate": 1.0444724051428155e-06,
+ "loss": 1.3896,
+ "step": 2859
+ },
+ {
+ "epoch": 2.860804601294114,
+ "grad_norm": 0.6291083693504333,
+ "learning_rate": 1.029415899788322e-06,
+ "loss": 1.2025,
+ "step": 2860
+ },
+ {
+ "epoch": 2.861804882623238,
+ "grad_norm": 0.6509825587272644,
+ "learning_rate": 1.0144681452069703e-06,
+ "loss": 1.3408,
+ "step": 2861
+ },
+ {
+ "epoch": 2.8628051639523617,
+ "grad_norm": 0.6753568649291992,
+ "learning_rate": 9.996291578236228e-07,
+ "loss": 1.3527,
+ "step": 2862
+ },
+ {
+ "epoch": 2.8638054452814856,
+ "grad_norm": 0.5860418081283569,
+ "learning_rate": 9.84898953943636e-07,
+ "loss": 1.3305,
+ "step": 2863
+ },
+ {
+ "epoch": 2.8648057266106095,
+ "grad_norm": 0.6797496676445007,
+ "learning_rate": 9.702775497528516e-07,
+ "loss": 1.3676,
+ "step": 2864
+ },
+ {
+ "epoch": 2.865806007939733,
+ "grad_norm": 0.6594141125679016,
+ "learning_rate": 9.5576496131754e-07,
+ "loss": 1.0028,
+ "step": 2865
+ },
+ {
+ "epoch": 2.866806289268857,
+ "grad_norm": 0.5467631816864014,
+ "learning_rate": 9.413612045844234e-07,
+ "loss": 1.0987,
+ "step": 2866
+ },
+ {
+ "epoch": 2.8678065705979807,
+ "grad_norm": 0.7644015550613403,
+ "learning_rate": 9.270662953806186e-07,
+ "loss": 1.3244,
+ "step": 2867
+ },
+ {
+ "epoch": 2.8688068519271046,
+ "grad_norm": 0.6484538912773132,
+ "learning_rate": 9.128802494136279e-07,
+ "loss": 1.3866,
+ "step": 2868
+ },
+ {
+ "epoch": 2.869807133256228,
+ "grad_norm": 0.706112802028656,
+ "learning_rate": 8.988030822713822e-07,
+ "loss": 1.4361,
+ "step": 2869
+ },
+ {
+ "epoch": 2.870807414585352,
+ "grad_norm": 0.5879806876182556,
+ "learning_rate": 8.848348094221192e-07,
+ "loss": 1.0335,
+ "step": 2870
+ },
+ {
+ "epoch": 2.871807695914476,
+ "grad_norm": 0.6835986971855164,
+ "learning_rate": 8.709754462144615e-07,
+ "loss": 1.3641,
+ "step": 2871
+ },
+ {
+ "epoch": 2.8728079772435997,
+ "grad_norm": 0.6715890765190125,
+ "learning_rate": 8.57225007877327e-07,
+ "loss": 1.3838,
+ "step": 2872
+ },
+ {
+ "epoch": 2.8738082585727236,
+ "grad_norm": 0.6681167483329773,
+ "learning_rate": 8.435835095199629e-07,
+ "loss": 1.1945,
+ "step": 2873
+ },
+ {
+ "epoch": 2.8748085399018475,
+ "grad_norm": 0.6274446249008179,
+ "learning_rate": 8.300509661319234e-07,
+ "loss": 1.1769,
+ "step": 2874
+ },
+ {
+ "epoch": 2.8758088212309714,
+ "grad_norm": 0.6935145258903503,
+ "learning_rate": 8.166273925830137e-07,
+ "loss": 1.2614,
+ "step": 2875
+ },
+ {
+ "epoch": 2.8768091025600953,
+ "grad_norm": 0.6466435790061951,
+ "learning_rate": 8.033128036233129e-07,
+ "loss": 1.4353,
+ "step": 2876
+ },
+ {
+ "epoch": 2.8778093838892187,
+ "grad_norm": 0.6013525128364563,
+ "learning_rate": 7.901072138831511e-07,
+ "loss": 1.0708,
+ "step": 2877
+ },
+ {
+ "epoch": 2.8788096652183426,
+ "grad_norm": 0.7344939708709717,
+ "learning_rate": 7.77010637873088e-07,
+ "loss": 1.4063,
+ "step": 2878
+ },
+ {
+ "epoch": 2.8798099465474665,
+ "grad_norm": 0.559315025806427,
+ "learning_rate": 7.640230899838785e-07,
+ "loss": 1.2612,
+ "step": 2879
+ },
+ {
+ "epoch": 2.8808102278765904,
+ "grad_norm": 0.6924266219139099,
+ "learning_rate": 7.511445844864962e-07,
+ "loss": 1.1298,
+ "step": 2880
+ },
+ {
+ "epoch": 2.8818105092057142,
+ "grad_norm": 0.6318414211273193,
+ "learning_rate": 7.383751355320989e-07,
+ "loss": 1.3504,
+ "step": 2881
+ },
+ {
+ "epoch": 2.8828107905348377,
+ "grad_norm": 0.6565932631492615,
+ "learning_rate": 7.257147571519851e-07,
+ "loss": 1.4179,
+ "step": 2882
+ },
+ {
+ "epoch": 2.8838110718639616,
+ "grad_norm": 0.936067521572113,
+ "learning_rate": 7.131634632576267e-07,
+ "loss": 1.3252,
+ "step": 2883
+ },
+ {
+ "epoch": 2.8848113531930855,
+ "grad_norm": 0.6421688795089722,
+ "learning_rate": 7.00721267640625e-07,
+ "loss": 1.3947,
+ "step": 2884
+ },
+ {
+ "epoch": 2.8858116345222093,
+ "grad_norm": 0.5888302326202393,
+ "learning_rate": 6.883881839727103e-07,
+ "loss": 1.2622,
+ "step": 2885
+ },
+ {
+ "epoch": 2.8868119158513332,
+ "grad_norm": 0.6417763829231262,
+ "learning_rate": 6.761642258056978e-07,
+ "loss": 1.2321,
+ "step": 2886
+ },
+ {
+ "epoch": 2.887812197180457,
+ "grad_norm": 0.697074294090271,
+ "learning_rate": 6.640494065715209e-07,
+ "loss": 1.3683,
+ "step": 2887
+ },
+ {
+ "epoch": 2.888812478509581,
+ "grad_norm": 0.570265531539917,
+ "learning_rate": 6.520437395821755e-07,
+ "loss": 1.2122,
+ "step": 2888
+ },
+ {
+ "epoch": 2.889812759838705,
+ "grad_norm": 0.5170134902000427,
+ "learning_rate": 6.401472380297091e-07,
+ "loss": 1.1952,
+ "step": 2889
+ },
+ {
+ "epoch": 2.8908130411678283,
+ "grad_norm": 0.6586466431617737,
+ "learning_rate": 6.283599149862207e-07,
+ "loss": 1.1862,
+ "step": 2890
+ },
+ {
+ "epoch": 2.891813322496952,
+ "grad_norm": 0.5928559303283691,
+ "learning_rate": 6.166817834038607e-07,
+ "loss": 1.1065,
+ "step": 2891
+ },
+ {
+ "epoch": 2.892813603826076,
+ "grad_norm": 0.5880603194236755,
+ "learning_rate": 6.051128561147756e-07,
+ "loss": 1.2642,
+ "step": 2892
+ },
+ {
+ "epoch": 2.8938138851552,
+ "grad_norm": 0.6198025345802307,
+ "learning_rate": 5.93653145831119e-07,
+ "loss": 1.4065,
+ "step": 2893
+ },
+ {
+ "epoch": 2.894814166484324,
+ "grad_norm": 0.6337584257125854,
+ "learning_rate": 5.823026651450625e-07,
+ "loss": 1.3118,
+ "step": 2894
+ },
+ {
+ "epoch": 2.8958144478134473,
+ "grad_norm": 0.5702016353607178,
+ "learning_rate": 5.710614265287073e-07,
+ "loss": 1.072,
+ "step": 2895
+ },
+ {
+ "epoch": 2.896814729142571,
+ "grad_norm": 0.6089914441108704,
+ "learning_rate": 5.599294423341506e-07,
+ "loss": 1.3368,
+ "step": 2896
+ },
+ {
+ "epoch": 2.897815010471695,
+ "grad_norm": 0.6208946108818054,
+ "learning_rate": 5.489067247934298e-07,
+ "loss": 1.2753,
+ "step": 2897
+ },
+ {
+ "epoch": 2.898815291800819,
+ "grad_norm": 0.5169445276260376,
+ "learning_rate": 5.379932860185122e-07,
+ "loss": 0.9038,
+ "step": 2898
+ },
+ {
+ "epoch": 2.899815573129943,
+ "grad_norm": 0.6453071236610413,
+ "learning_rate": 5.271891380013161e-07,
+ "loss": 1.2824,
+ "step": 2899
+ },
+ {
+ "epoch": 2.9008158544590668,
+ "grad_norm": 0.574354350566864,
+ "learning_rate": 5.164942926136118e-07,
+ "loss": 1.1945,
+ "step": 2900
+ },
+ {
+ "epoch": 2.9018161357881906,
+ "grad_norm": 0.6423181891441345,
+ "learning_rate": 5.059087616071211e-07,
+ "loss": 1.0824,
+ "step": 2901
+ },
+ {
+ "epoch": 2.902816417117314,
+ "grad_norm": 0.5713688135147095,
+ "learning_rate": 4.954325566134177e-07,
+ "loss": 1.1809,
+ "step": 2902
+ },
+ {
+ "epoch": 2.903816698446438,
+ "grad_norm": 0.5611819624900818,
+ "learning_rate": 4.85065689143982e-07,
+ "loss": 1.253,
+ "step": 2903
+ },
+ {
+ "epoch": 2.904816979775562,
+ "grad_norm": 0.6227515339851379,
+ "learning_rate": 4.748081705900909e-07,
+ "loss": 1.1268,
+ "step": 2904
+ },
+ {
+ "epoch": 2.9058172611046857,
+ "grad_norm": 0.698701024055481,
+ "learning_rate": 4.646600122229283e-07,
+ "loss": 1.369,
+ "step": 2905
+ },
+ {
+ "epoch": 2.9068175424338096,
+ "grad_norm": 0.6106724739074707,
+ "learning_rate": 4.546212251934745e-07,
+ "loss": 1.2973,
+ "step": 2906
+ },
+ {
+ "epoch": 2.907817823762933,
+ "grad_norm": 0.5942571759223938,
+ "learning_rate": 4.44691820532539e-07,
+ "loss": 1.2903,
+ "step": 2907
+ },
+ {
+ "epoch": 2.908818105092057,
+ "grad_norm": 0.6650285720825195,
+ "learning_rate": 4.3487180915074976e-07,
+ "loss": 1.3065,
+ "step": 2908
+ },
+ {
+ "epoch": 2.909818386421181,
+ "grad_norm": 0.6427621841430664,
+ "learning_rate": 4.251612018385087e-07,
+ "loss": 1.419,
+ "step": 2909
+ },
+ {
+ "epoch": 2.9108186677503047,
+ "grad_norm": 0.6081385612487793,
+ "learning_rate": 4.155600092660361e-07,
+ "loss": 1.2347,
+ "step": 2910
+ },
+ {
+ "epoch": 2.9118189490794286,
+ "grad_norm": 0.5915107131004333,
+ "learning_rate": 4.0606824198329287e-07,
+ "loss": 1.1163,
+ "step": 2911
+ },
+ {
+ "epoch": 2.9128192304085525,
+ "grad_norm": 0.7413575649261475,
+ "learning_rate": 3.9668591042002487e-07,
+ "loss": 1.3164,
+ "step": 2912
+ },
+ {
+ "epoch": 2.9138195117376764,
+ "grad_norm": 0.5955550670623779,
+ "learning_rate": 3.8741302488570774e-07,
+ "loss": 1.3538,
+ "step": 2913
+ },
+ {
+ "epoch": 2.9148197930668003,
+ "grad_norm": 0.6943541169166565,
+ "learning_rate": 3.782495955695686e-07,
+ "loss": 1.2355,
+ "step": 2914
+ },
+ {
+ "epoch": 2.9158200743959237,
+ "grad_norm": 0.6371672749519348,
+ "learning_rate": 3.6919563254056434e-07,
+ "loss": 1.3395,
+ "step": 2915
+ },
+ {
+ "epoch": 2.9168203557250476,
+ "grad_norm": 0.5414052605628967,
+ "learning_rate": 3.6025114574734785e-07,
+ "loss": 1.0837,
+ "step": 2916
+ },
+ {
+ "epoch": 2.9178206370541715,
+ "grad_norm": 0.6474078893661499,
+ "learning_rate": 3.514161450183129e-07,
+ "loss": 1.1137,
+ "step": 2917
+ },
+ {
+ "epoch": 2.9188209183832954,
+ "grad_norm": 0.746285080909729,
+ "learning_rate": 3.42690640061516e-07,
+ "loss": 1.2393,
+ "step": 2918
+ },
+ {
+ "epoch": 2.9198211997124193,
+ "grad_norm": 0.609183669090271,
+ "learning_rate": 3.3407464046470993e-07,
+ "loss": 1.2016,
+ "step": 2919
+ },
+ {
+ "epoch": 2.9208214810415427,
+ "grad_norm": 0.7043150067329407,
+ "learning_rate": 3.255681556953216e-07,
+ "loss": 1.1894,
+ "step": 2920
+ },
+ {
+ "epoch": 2.9218217623706666,
+ "grad_norm": 0.6038593649864197,
+ "learning_rate": 3.171711951004408e-07,
+ "loss": 1.3285,
+ "step": 2921
+ },
+ {
+ "epoch": 2.9228220436997905,
+ "grad_norm": 0.6714390516281128,
+ "learning_rate": 3.0888376790679795e-07,
+ "loss": 1.3793,
+ "step": 2922
+ },
+ {
+ "epoch": 2.9238223250289144,
+ "grad_norm": 0.5994642376899719,
+ "learning_rate": 3.007058832207976e-07,
+ "loss": 1.3138,
+ "step": 2923
+ },
+ {
+ "epoch": 2.9248226063580383,
+ "grad_norm": 0.6345430016517639,
+ "learning_rate": 2.926375500284406e-07,
+ "loss": 1.1618,
+ "step": 2924
+ },
+ {
+ "epoch": 2.925822887687162,
+ "grad_norm": 0.6328153014183044,
+ "learning_rate": 2.846787771953574e-07,
+ "loss": 1.3554,
+ "step": 2925
+ },
+ {
+ "epoch": 2.926823169016286,
+ "grad_norm": 0.6081757545471191,
+ "learning_rate": 2.7682957346683026e-07,
+ "loss": 1.3207,
+ "step": 2926
+ },
+ {
+ "epoch": 2.92782345034541,
+ "grad_norm": 0.6575589776039124,
+ "learning_rate": 2.6908994746768226e-07,
+ "loss": 1.3048,
+ "step": 2927
+ },
+ {
+ "epoch": 2.9288237316745334,
+ "grad_norm": 0.590454638004303,
+ "learning_rate": 2.6145990770238825e-07,
+ "loss": 1.2692,
+ "step": 2928
+ },
+ {
+ "epoch": 2.9298240130036572,
+ "grad_norm": 0.5916540026664734,
+ "learning_rate": 2.539394625549529e-07,
+ "loss": 1.1209,
+ "step": 2929
+ },
+ {
+ "epoch": 2.930824294332781,
+ "grad_norm": 0.6312572360038757,
+ "learning_rate": 2.4652862028902156e-07,
+ "loss": 1.2213,
+ "step": 2930
+ },
+ {
+ "epoch": 2.931824575661905,
+ "grad_norm": 1.0912832021713257,
+ "learning_rate": 2.392273890477359e-07,
+ "loss": 1.0917,
+ "step": 2931
+ },
+ {
+ "epoch": 2.9328248569910285,
+ "grad_norm": 0.5967919826507568,
+ "learning_rate": 2.3203577685385612e-07,
+ "loss": 1.2886,
+ "step": 2932
+ },
+ {
+ "epoch": 2.9338251383201523,
+ "grad_norm": 0.6887509822845459,
+ "learning_rate": 2.2495379160963891e-07,
+ "loss": 1.4177,
+ "step": 2933
+ },
+ {
+ "epoch": 2.9348254196492762,
+ "grad_norm": 0.6881254315376282,
+ "learning_rate": 2.179814410969261e-07,
+ "loss": 1.1938,
+ "step": 2934
+ },
+ {
+ "epoch": 2.9358257009784,
+ "grad_norm": 0.8602995872497559,
+ "learning_rate": 2.1111873297706696e-07,
+ "loss": 1.0975,
+ "step": 2935
+ },
+ {
+ "epoch": 2.936825982307524,
+ "grad_norm": 0.7097172141075134,
+ "learning_rate": 2.043656747909184e-07,
+ "loss": 1.5026,
+ "step": 2936
+ },
+ {
+ "epoch": 2.937826263636648,
+ "grad_norm": 0.5857811570167542,
+ "learning_rate": 1.977222739588891e-07,
+ "loss": 1.2069,
+ "step": 2937
+ },
+ {
+ "epoch": 2.938826544965772,
+ "grad_norm": 0.6201745271682739,
+ "learning_rate": 1.9118853778086199e-07,
+ "loss": 1.2504,
+ "step": 2938
+ },
+ {
+ "epoch": 2.9398268262948957,
+ "grad_norm": 0.7820329070091248,
+ "learning_rate": 1.847644734362497e-07,
+ "loss": 1.2293,
+ "step": 2939
+ },
+ {
+ "epoch": 2.940827107624019,
+ "grad_norm": 0.9819826483726501,
+ "learning_rate": 1.7845008798391682e-07,
+ "loss": 1.4997,
+ "step": 2940
+ },
+ {
+ "epoch": 2.941827388953143,
+ "grad_norm": 0.6182419061660767,
+ "learning_rate": 1.7224538836223546e-07,
+ "loss": 1.1516,
+ "step": 2941
+ },
+ {
+ "epoch": 2.942827670282267,
+ "grad_norm": 0.622620701789856,
+ "learning_rate": 1.6615038138906303e-07,
+ "loss": 1.2622,
+ "step": 2942
+ },
+ {
+ "epoch": 2.9438279516113908,
+ "grad_norm": 0.6595246195793152,
+ "learning_rate": 1.6016507376169777e-07,
+ "loss": 1.302,
+ "step": 2943
+ },
+ {
+ "epoch": 2.9448282329405147,
+ "grad_norm": 0.6354462504386902,
+ "learning_rate": 1.5428947205690103e-07,
+ "loss": 1.3658,
+ "step": 2944
+ },
+ {
+ "epoch": 2.945828514269638,
+ "grad_norm": 0.5985936522483826,
+ "learning_rate": 1.4852358273091948e-07,
+ "loss": 1.4015,
+ "step": 2945
+ },
+ {
+ "epoch": 2.946828795598762,
+ "grad_norm": 0.6461771130561829,
+ "learning_rate": 1.4286741211940736e-07,
+ "loss": 0.9782,
+ "step": 2946
+ },
+ {
+ "epoch": 2.947829076927886,
+ "grad_norm": 0.6160744428634644,
+ "learning_rate": 1.3732096643747084e-07,
+ "loss": 1.3057,
+ "step": 2947
+ },
+ {
+ "epoch": 2.9488293582570098,
+ "grad_norm": 0.6424825191497803,
+ "learning_rate": 1.3188425177966813e-07,
+ "loss": 1.2554,
+ "step": 2948
+ },
+ {
+ "epoch": 2.9498296395861336,
+ "grad_norm": 0.6611213088035583,
+ "learning_rate": 1.2655727411994278e-07,
+ "loss": 1.2187,
+ "step": 2949
+ },
+ {
+ "epoch": 2.9508299209152575,
+ "grad_norm": 0.5817217826843262,
+ "learning_rate": 1.2134003931169035e-07,
+ "loss": 1.4,
+ "step": 2950
+ },
+ {
+ "epoch": 2.9518302022443814,
+ "grad_norm": 0.6573741436004639,
+ "learning_rate": 1.1623255308772507e-07,
+ "loss": 1.0189,
+ "step": 2951
+ },
+ {
+ "epoch": 2.9528304835735053,
+ "grad_norm": 0.6182346940040588,
+ "learning_rate": 1.1123482106021322e-07,
+ "loss": 1.3023,
+ "step": 2952
+ },
+ {
+ "epoch": 2.9538307649026287,
+ "grad_norm": 0.6287407279014587,
+ "learning_rate": 1.0634684872079526e-07,
+ "loss": 1.2717,
+ "step": 2953
+ },
+ {
+ "epoch": 2.9548310462317526,
+ "grad_norm": 0.5616800785064697,
+ "learning_rate": 1.0156864144044154e-07,
+ "loss": 1.2228,
+ "step": 2954
+ },
+ {
+ "epoch": 2.9558313275608765,
+ "grad_norm": 0.700332522392273,
+ "learning_rate": 9.690020446956328e-08,
+ "loss": 1.4575,
+ "step": 2955
+ },
+ {
+ "epoch": 2.9568316088900004,
+ "grad_norm": 0.6529813408851624,
+ "learning_rate": 9.234154293790154e-08,
+ "loss": 1.3294,
+ "step": 2956
+ },
+ {
+ "epoch": 2.9578318902191243,
+ "grad_norm": 0.6121209859848022,
+ "learning_rate": 8.789266185461608e-08,
+ "loss": 1.155,
+ "step": 2957
+ },
+ {
+ "epoch": 2.9588321715482477,
+ "grad_norm": 0.7049760222434998,
+ "learning_rate": 8.355356610822984e-08,
+ "loss": 1.2936,
+ "step": 2958
+ },
+ {
+ "epoch": 2.9598324528773716,
+ "grad_norm": 0.6265867948532104,
+ "learning_rate": 7.932426046660669e-08,
+ "loss": 1.0534,
+ "step": 2959
+ },
+ {
+ "epoch": 2.9608327342064955,
+ "grad_norm": 0.6382441520690918,
+ "learning_rate": 7.520474957699586e-08,
+ "loss": 1.0862,
+ "step": 2960
+ },
+ {
+ "epoch": 2.9618330155356194,
+ "grad_norm": 0.626731812953949,
+ "learning_rate": 7.119503796599868e-08,
+ "loss": 1.0351,
+ "step": 2961
+ },
+ {
+ "epoch": 2.9628332968647433,
+ "grad_norm": 0.6465422511100769,
+ "learning_rate": 6.729513003955745e-08,
+ "loss": 1.197,
+ "step": 2962
+ },
+ {
+ "epoch": 2.963833578193867,
+ "grad_norm": 0.6029139757156372,
+ "learning_rate": 6.350503008296648e-08,
+ "loss": 1.4363,
+ "step": 2963
+ },
+ {
+ "epoch": 2.964833859522991,
+ "grad_norm": 0.7285861372947693,
+ "learning_rate": 5.98247422608722e-08,
+ "loss": 1.4143,
+ "step": 2964
+ },
+ {
+ "epoch": 2.9658341408521145,
+ "grad_norm": 0.6350001692771912,
+ "learning_rate": 5.6254270617228656e-08,
+ "loss": 1.1834,
+ "step": 2965
+ },
+ {
+ "epoch": 2.9668344221812384,
+ "grad_norm": 0.6555259227752686,
+ "learning_rate": 5.279361907534197e-08,
+ "loss": 1.0986,
+ "step": 2966
+ },
+ {
+ "epoch": 2.9678347035103623,
+ "grad_norm": 0.6389521956443787,
+ "learning_rate": 4.9442791437848136e-08,
+ "loss": 1.1511,
+ "step": 2967
+ },
+ {
+ "epoch": 2.968834984839486,
+ "grad_norm": 0.7111738324165344,
+ "learning_rate": 4.620179138670189e-08,
+ "loss": 1.2582,
+ "step": 2968
+ },
+ {
+ "epoch": 2.96983526616861,
+ "grad_norm": 0.6513357162475586,
+ "learning_rate": 4.3070622483165627e-08,
+ "loss": 1.1007,
+ "step": 2969
+ },
+ {
+ "epoch": 2.9708355474977335,
+ "grad_norm": 0.6898165941238403,
+ "learning_rate": 4.0049288167842705e-08,
+ "loss": 1.3118,
+ "step": 2970
+ },
+ {
+ "epoch": 2.9718358288268574,
+ "grad_norm": 0.645189106464386,
+ "learning_rate": 3.713779176061083e-08,
+ "loss": 1.3931,
+ "step": 2971
+ },
+ {
+ "epoch": 2.9728361101559813,
+ "grad_norm": 0.6340959072113037,
+ "learning_rate": 3.433613646069977e-08,
+ "loss": 1.1404,
+ "step": 2972
+ },
+ {
+ "epoch": 2.973836391485105,
+ "grad_norm": 0.6642202734947205,
+ "learning_rate": 3.164432534662476e-08,
+ "loss": 1.2204,
+ "step": 2973
+ },
+ {
+ "epoch": 2.974836672814229,
+ "grad_norm": 0.6397119164466858,
+ "learning_rate": 2.906236137617535e-08,
+ "loss": 1.3132,
+ "step": 2974
+ },
+ {
+ "epoch": 2.975836954143353,
+ "grad_norm": 0.614349365234375,
+ "learning_rate": 2.659024738648208e-08,
+ "loss": 1.0383,
+ "step": 2975
+ },
+ {
+ "epoch": 2.976837235472477,
+ "grad_norm": 0.6697264909744263,
+ "learning_rate": 2.4227986093938726e-08,
+ "loss": 1.2026,
+ "step": 2976
+ },
+ {
+ "epoch": 2.9778375168016007,
+ "grad_norm": 0.6192601323127747,
+ "learning_rate": 2.197558009425782e-08,
+ "loss": 1.3694,
+ "step": 2977
+ },
+ {
+ "epoch": 2.978837798130724,
+ "grad_norm": 0.7235078811645508,
+ "learning_rate": 1.983303186241514e-08,
+ "loss": 1.3063,
+ "step": 2978
+ },
+ {
+ "epoch": 2.979838079459848,
+ "grad_norm": 0.7176287770271301,
+ "learning_rate": 1.7800343752683023e-08,
+ "loss": 1.3161,
+ "step": 2979
+ },
+ {
+ "epoch": 2.980838360788972,
+ "grad_norm": 0.5626864433288574,
+ "learning_rate": 1.5877517998630355e-08,
+ "loss": 1.3038,
+ "step": 2980
+ },
+ {
+ "epoch": 2.981838642118096,
+ "grad_norm": 0.6934079527854919,
+ "learning_rate": 1.4064556713089261e-08,
+ "loss": 1.2691,
+ "step": 2981
+ },
+ {
+ "epoch": 2.9828389234472197,
+ "grad_norm": 0.6676089763641357,
+ "learning_rate": 1.2361461888166226e-08,
+ "loss": 1.301,
+ "step": 2982
+ },
+ {
+ "epoch": 2.983839204776343,
+ "grad_norm": 0.7442446947097778,
+ "learning_rate": 1.0768235395264282e-08,
+ "loss": 1.2848,
+ "step": 2983
+ },
+ {
+ "epoch": 2.984839486105467,
+ "grad_norm": 0.6114760637283325,
+ "learning_rate": 9.284878985038604e-09,
+ "loss": 1.0876,
+ "step": 2984
+ },
+ {
+ "epoch": 2.985839767434591,
+ "grad_norm": 0.6515705585479736,
+ "learning_rate": 7.911394287452023e-09,
+ "loss": 1.1076,
+ "step": 2985
+ },
+ {
+ "epoch": 2.986840048763715,
+ "grad_norm": 0.761256992816925,
+ "learning_rate": 6.647782811697312e-09,
+ "loss": 1.2699,
+ "step": 2986
+ },
+ {
+ "epoch": 2.9878403300928387,
+ "grad_norm": 0.625106155872345,
+ "learning_rate": 5.494045946263793e-09,
+ "loss": 1.2516,
+ "step": 2987
+ },
+ {
+ "epoch": 2.9888406114219626,
+ "grad_norm": 0.7670434713363647,
+ "learning_rate": 4.4501849589040355e-09,
+ "loss": 1.295,
+ "step": 2988
+ },
+ {
+ "epoch": 2.9898408927510864,
+ "grad_norm": 0.5739517211914062,
+ "learning_rate": 3.5162009966227537e-09,
+ "loss": 1.159,
+ "step": 2989
+ },
+ {
+ "epoch": 2.9908411740802103,
+ "grad_norm": 0.6593197584152222,
+ "learning_rate": 2.692095085699009e-09,
+ "loss": 1.3673,
+ "step": 2990
+ },
+ {
+ "epoch": 2.9918414554093338,
+ "grad_norm": 0.7491052150726318,
+ "learning_rate": 1.977868131675109e-09,
+ "loss": 1.2032,
+ "step": 2991
+ },
+ {
+ "epoch": 2.9928417367384577,
+ "grad_norm": 0.547743022441864,
+ "learning_rate": 1.3735209193677102e-09,
+ "loss": 1.0763,
+ "step": 2992
+ },
+ {
+ "epoch": 2.9938420180675815,
+ "grad_norm": 0.5710447430610657,
+ "learning_rate": 8.790541128345098e-10,
+ "loss": 1.2304,
+ "step": 2993
+ },
+ {
+ "epoch": 2.9948422993967054,
+ "grad_norm": 0.7330135703086853,
+ "learning_rate": 4.944682554075542e-10,
+ "loss": 1.3085,
+ "step": 2994
+ },
+ {
+ "epoch": 2.995842580725829,
+ "grad_norm": 0.7360786199569702,
+ "learning_rate": 2.1976376969323753e-10,
+ "loss": 1.2843,
+ "step": 2995
+ },
+ {
+ "epoch": 2.9968428620549528,
+ "grad_norm": 0.6117874979972839,
+ "learning_rate": 5.4940957516791404e-11,
+ "loss": 1.172,
+ "step": 2996
+ },
+ {
+ "epoch": 2.9978431433840766,
+ "grad_norm": 0.7113885879516602,
+ "learning_rate": 0.0,
+ "loss": 1.2594,
+ "step": 2997
+ }
+ ],
+ "logging_steps": 1,
+ "max_steps": 2997,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 3,
+ "save_steps": 500,
+ "stateful_callbacks": {
+ "TrainerControl": {
+ "args": {
+ "should_epoch_stop": false,
+ "should_evaluate": false,
+ "should_log": false,
+ "should_save": true,
+ "should_training_stop": true
+ },
+ "attributes": {}
+ }
+ },
+ "total_flos": 2.425081051226112e+16,
+ "train_batch_size": 2,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/checkpoint-2997/training_args.bin b/checkpoint-2997/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..6831a6d623a8a2b84942bb5584c6aa5bc14eee51
--- /dev/null
+++ b/checkpoint-2997/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5db131d6e82df60077bab037ec35113e1b0836a0bd72bb0a21e3fc0311a527de
+size 5304
diff --git a/checkpoint-500/config.json b/checkpoint-500/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..49d4bd1e1961ef7daf9af9a7dbe539789d58d949
--- /dev/null
+++ b/checkpoint-500/config.json
@@ -0,0 +1,35 @@
+{
+ "_name_or_path": "facebook/nllb-200-distilled-600M",
+ "activation_dropout": 0.0,
+ "activation_function": "relu",
+ "architectures": [
+ "M2M100ForConditionalGeneration"
+ ],
+ "attention_dropout": 0.1,
+ "bos_token_id": 0,
+ "d_model": 1024,
+ "decoder_attention_heads": 16,
+ "decoder_ffn_dim": 4096,
+ "decoder_layerdrop": 0,
+ "decoder_layers": 12,
+ "decoder_start_token_id": 2,
+ "dropout": 0.1,
+ "encoder_attention_heads": 16,
+ "encoder_ffn_dim": 4096,
+ "encoder_layerdrop": 0,
+ "encoder_layers": 12,
+ "eos_token_id": 2,
+ "init_std": 0.02,
+ "is_encoder_decoder": true,
+ "max_length": 200,
+ "max_position_embeddings": 1024,
+ "model_type": "m2m_100",
+ "num_hidden_layers": 12,
+ "pad_token_id": 1,
+ "scale_embedding": true,
+ "tokenizer_class": "NllbTokenizer",
+ "torch_dtype": "float32",
+ "transformers_version": "4.43.1",
+ "use_cache": true,
+ "vocab_size": 256206
+}
diff --git a/checkpoint-500/generation_config.json b/checkpoint-500/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..680d3e0504023804deeb427766576194a0f17d47
--- /dev/null
+++ b/checkpoint-500/generation_config.json
@@ -0,0 +1,9 @@
+{
+ "_from_model_config": true,
+ "bos_token_id": 0,
+ "decoder_start_token_id": 2,
+ "eos_token_id": 2,
+ "max_length": 200,
+ "pad_token_id": 1,
+ "transformers_version": "4.43.1"
+}
diff --git a/checkpoint-500/model.safetensors b/checkpoint-500/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..37a1cf3ffb06842a2ab69e8b48f6b23df8f46dea
--- /dev/null
+++ b/checkpoint-500/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a5803a8af5e42e6195c56ca80ebd0da7764c980d770982938f0de52ea32649d0
+size 2460354912
diff --git a/checkpoint-500/optimizer.pt b/checkpoint-500/optimizer.pt
new file mode 100644
index 0000000000000000000000000000000000000000..2bf6dc950c37bc21f970f597794356058cc32d2e
--- /dev/null
+++ b/checkpoint-500/optimizer.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6675a9570bd1d3171be55329fddf55b229c40852fc80bb580edfde80cb1c89b6
+size 5125261
diff --git a/checkpoint-500/rng_state.pth b/checkpoint-500/rng_state.pth
new file mode 100644
index 0000000000000000000000000000000000000000..02c49571e0b25a3cf860b721750300b7591bb78c
--- /dev/null
+++ b/checkpoint-500/rng_state.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d68c3ee2c3c5ddcf6db7279ef06b916b10b0e09325f7315b87b5ca3d934a3a3e
+size 14244
diff --git a/checkpoint-500/scheduler.pt b/checkpoint-500/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..9cee5592ce4571aaf187e5914e2a5061190c309f
--- /dev/null
+++ b/checkpoint-500/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dc10672965fd5a4faf482f9128558b0f5647fbb1b90caeb7d1201dc785e0b337
+size 1064
diff --git a/checkpoint-500/sentencepiece.bpe.model b/checkpoint-500/sentencepiece.bpe.model
new file mode 100644
index 0000000000000000000000000000000000000000..dc2262d3e1d375b235eb71c24119c8e73f85d4ad
--- /dev/null
+++ b/checkpoint-500/sentencepiece.bpe.model
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:14bb8dfb35c0ffdea7bc01e56cea38b9e3d5efcdcb9c251d6b40538e1aab555a
+size 4852054
diff --git a/checkpoint-500/special_tokens_map.json b/checkpoint-500/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..770c6f4e25faf27bbc3878b806f2ecfb88c5169e
--- /dev/null
+++ b/checkpoint-500/special_tokens_map.json
@@ -0,0 +1,255 @@
+{
+ "additional_special_tokens": [
+ "ace_Arab",
+ "ace_Latn",
+ "acm_Arab",
+ "acq_Arab",
+ "aeb_Arab",
+ "afr_Latn",
+ "ajp_Arab",
+ "aka_Latn",
+ "amh_Ethi",
+ "apc_Arab",
+ "arb_Arab",
+ "ars_Arab",
+ "ary_Arab",
+ "arz_Arab",
+ "asm_Beng",
+ "ast_Latn",
+ "awa_Deva",
+ "ayr_Latn",
+ "azb_Arab",
+ "azj_Latn",
+ "bak_Cyrl",
+ "bam_Latn",
+ "ban_Latn",
+ "bel_Cyrl",
+ "bem_Latn",
+ "ben_Beng",
+ "bho_Deva",
+ "bjn_Arab",
+ "bjn_Latn",
+ "bod_Tibt",
+ "bos_Latn",
+ "bug_Latn",
+ "bul_Cyrl",
+ "cat_Latn",
+ "ceb_Latn",
+ "ces_Latn",
+ "cjk_Latn",
+ "ckb_Arab",
+ "crh_Latn",
+ "cym_Latn",
+ "dan_Latn",
+ "deu_Latn",
+ "dik_Latn",
+ "dyu_Latn",
+ "dzo_Tibt",
+ "ell_Grek",
+ "eng_Latn",
+ "epo_Latn",
+ "est_Latn",
+ "eus_Latn",
+ "ewe_Latn",
+ "fao_Latn",
+ "pes_Arab",
+ "fij_Latn",
+ "fin_Latn",
+ "fon_Latn",
+ "fra_Latn",
+ "fur_Latn",
+ "fuv_Latn",
+ "gla_Latn",
+ "gle_Latn",
+ "glg_Latn",
+ "grn_Latn",
+ "guj_Gujr",
+ "hat_Latn",
+ "hau_Latn",
+ "heb_Hebr",
+ "hin_Deva",
+ "hne_Deva",
+ "hrv_Latn",
+ "hun_Latn",
+ "hye_Armn",
+ "ibo_Latn",
+ "ilo_Latn",
+ "ind_Latn",
+ "isl_Latn",
+ "ita_Latn",
+ "jav_Latn",
+ "jpn_Jpan",
+ "kab_Latn",
+ "kac_Latn",
+ "kam_Latn",
+ "kan_Knda",
+ "kas_Arab",
+ "kas_Deva",
+ "kat_Geor",
+ "knc_Arab",
+ "knc_Latn",
+ "kaz_Cyrl",
+ "kbp_Latn",
+ "kea_Latn",
+ "khm_Khmr",
+ "kik_Latn",
+ "kin_Latn",
+ "kir_Cyrl",
+ "kmb_Latn",
+ "kon_Latn",
+ "kor_Hang",
+ "kmr_Latn",
+ "lao_Laoo",
+ "lvs_Latn",
+ "lij_Latn",
+ "lim_Latn",
+ "lin_Latn",
+ "lit_Latn",
+ "lmo_Latn",
+ "ltg_Latn",
+ "ltz_Latn",
+ "lua_Latn",
+ "lug_Latn",
+ "luo_Latn",
+ "lus_Latn",
+ "mag_Deva",
+ "mai_Deva",
+ "mal_Mlym",
+ "mar_Deva",
+ "min_Latn",
+ "mkd_Cyrl",
+ "plt_Latn",
+ "mlt_Latn",
+ "mni_Beng",
+ "khk_Cyrl",
+ "mos_Latn",
+ "mri_Latn",
+ "zsm_Latn",
+ "mya_Mymr",
+ "nld_Latn",
+ "nno_Latn",
+ "nob_Latn",
+ "npi_Deva",
+ "nso_Latn",
+ "nus_Latn",
+ "nya_Latn",
+ "oci_Latn",
+ "gaz_Latn",
+ "ory_Orya",
+ "pag_Latn",
+ "pan_Guru",
+ "pap_Latn",
+ "pol_Latn",
+ "por_Latn",
+ "prs_Arab",
+ "pbt_Arab",
+ "quy_Latn",
+ "ron_Latn",
+ "run_Latn",
+ "rus_Cyrl",
+ "sag_Latn",
+ "san_Deva",
+ "sat_Beng",
+ "scn_Latn",
+ "shn_Mymr",
+ "sin_Sinh",
+ "slk_Latn",
+ "slv_Latn",
+ "smo_Latn",
+ "sna_Latn",
+ "snd_Arab",
+ "som_Latn",
+ "sot_Latn",
+ "spa_Latn",
+ "als_Latn",
+ "srd_Latn",
+ "srp_Cyrl",
+ "ssw_Latn",
+ "sun_Latn",
+ "swe_Latn",
+ "swh_Latn",
+ "szl_Latn",
+ "tam_Taml",
+ "tat_Cyrl",
+ "tel_Telu",
+ "tgk_Cyrl",
+ "tgl_Latn",
+ "tha_Thai",
+ "tir_Ethi",
+ "taq_Latn",
+ "taq_Tfng",
+ "tpi_Latn",
+ "tsn_Latn",
+ "tso_Latn",
+ "tuk_Latn",
+ "tum_Latn",
+ "tur_Latn",
+ "twi_Latn",
+ "tzm_Tfng",
+ "uig_Arab",
+ "ukr_Cyrl",
+ "umb_Latn",
+ "urd_Arab",
+ "uzn_Latn",
+ "vec_Latn",
+ "vie_Latn",
+ "war_Latn",
+ "wol_Latn",
+ "xho_Latn",
+ "ydd_Hebr",
+ "yor_Latn",
+ "yue_Hant",
+ "zho_Hans",
+ "zho_Hant",
+ "zul_Latn"
+ ],
+ "bos_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "cls_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "mask_token": {
+ "content": "",
+ "lstrip": true,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "sep_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/checkpoint-500/tokenizer.json b/checkpoint-500/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..98050e98b98364c06d83b3f41864076220cb8408
--- /dev/null
+++ b/checkpoint-500/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3b39b25b0763a1dd69dec54081fafcf10770d9f2538a3bd975a0c4be6d60a9c2
+size 17331294
diff --git a/checkpoint-500/tokenizer_config.json b/checkpoint-500/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f1424d3657c008568198b44be241646482e7e9f2
--- /dev/null
+++ b/checkpoint-500/tokenizer_config.json
@@ -0,0 +1,1878 @@
+{
+ "added_tokens_decoder": {
+ "0": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "3": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256001": {
+ "content": "ace_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256002": {
+ "content": "ace_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256003": {
+ "content": "acm_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256004": {
+ "content": "acq_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256005": {
+ "content": "aeb_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256006": {
+ "content": "afr_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256007": {
+ "content": "ajp_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256008": {
+ "content": "aka_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256009": {
+ "content": "amh_Ethi",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256010": {
+ "content": "apc_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256011": {
+ "content": "arb_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256012": {
+ "content": "ars_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256013": {
+ "content": "ary_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256014": {
+ "content": "arz_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256015": {
+ "content": "asm_Beng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256016": {
+ "content": "ast_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256017": {
+ "content": "awa_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256018": {
+ "content": "ayr_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256019": {
+ "content": "azb_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256020": {
+ "content": "azj_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256021": {
+ "content": "bak_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256022": {
+ "content": "bam_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256023": {
+ "content": "ban_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256024": {
+ "content": "bel_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256025": {
+ "content": "bem_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256026": {
+ "content": "ben_Beng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256027": {
+ "content": "bho_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256028": {
+ "content": "bjn_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256029": {
+ "content": "bjn_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256030": {
+ "content": "bod_Tibt",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256031": {
+ "content": "bos_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256032": {
+ "content": "bug_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256033": {
+ "content": "bul_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256034": {
+ "content": "cat_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256035": {
+ "content": "ceb_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256036": {
+ "content": "ces_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256037": {
+ "content": "cjk_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256038": {
+ "content": "ckb_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256039": {
+ "content": "crh_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256040": {
+ "content": "cym_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256041": {
+ "content": "dan_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256042": {
+ "content": "deu_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256043": {
+ "content": "dik_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256044": {
+ "content": "dyu_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256045": {
+ "content": "dzo_Tibt",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256046": {
+ "content": "ell_Grek",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256047": {
+ "content": "eng_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256048": {
+ "content": "epo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256049": {
+ "content": "est_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256050": {
+ "content": "eus_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256051": {
+ "content": "ewe_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256052": {
+ "content": "fao_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256053": {
+ "content": "pes_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256054": {
+ "content": "fij_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256055": {
+ "content": "fin_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256056": {
+ "content": "fon_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256057": {
+ "content": "fra_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256058": {
+ "content": "fur_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256059": {
+ "content": "fuv_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256060": {
+ "content": "gla_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256061": {
+ "content": "gle_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256062": {
+ "content": "glg_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256063": {
+ "content": "grn_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256064": {
+ "content": "guj_Gujr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256065": {
+ "content": "hat_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256066": {
+ "content": "hau_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256067": {
+ "content": "heb_Hebr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256068": {
+ "content": "hin_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256069": {
+ "content": "hne_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256070": {
+ "content": "hrv_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256071": {
+ "content": "hun_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256072": {
+ "content": "hye_Armn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256073": {
+ "content": "ibo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256074": {
+ "content": "ilo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256075": {
+ "content": "ind_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256076": {
+ "content": "isl_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256077": {
+ "content": "ita_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256078": {
+ "content": "jav_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256079": {
+ "content": "jpn_Jpan",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256080": {
+ "content": "kab_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256081": {
+ "content": "kac_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256082": {
+ "content": "kam_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256083": {
+ "content": "kan_Knda",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256084": {
+ "content": "kas_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256085": {
+ "content": "kas_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256086": {
+ "content": "kat_Geor",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256087": {
+ "content": "knc_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256088": {
+ "content": "knc_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256089": {
+ "content": "kaz_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256090": {
+ "content": "kbp_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256091": {
+ "content": "kea_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256092": {
+ "content": "khm_Khmr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256093": {
+ "content": "kik_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256094": {
+ "content": "kin_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256095": {
+ "content": "kir_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256096": {
+ "content": "kmb_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256097": {
+ "content": "kon_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256098": {
+ "content": "kor_Hang",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256099": {
+ "content": "kmr_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256100": {
+ "content": "lao_Laoo",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256101": {
+ "content": "lvs_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256102": {
+ "content": "lij_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256103": {
+ "content": "lim_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256104": {
+ "content": "lin_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256105": {
+ "content": "lit_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256106": {
+ "content": "lmo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256107": {
+ "content": "ltg_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256108": {
+ "content": "ltz_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256109": {
+ "content": "lua_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256110": {
+ "content": "lug_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256111": {
+ "content": "luo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256112": {
+ "content": "lus_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256113": {
+ "content": "mag_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256114": {
+ "content": "mai_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256115": {
+ "content": "mal_Mlym",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256116": {
+ "content": "mar_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256117": {
+ "content": "min_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256118": {
+ "content": "mkd_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256119": {
+ "content": "plt_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256120": {
+ "content": "mlt_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256121": {
+ "content": "mni_Beng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256122": {
+ "content": "khk_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256123": {
+ "content": "mos_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256124": {
+ "content": "mri_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256125": {
+ "content": "zsm_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256126": {
+ "content": "mya_Mymr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256127": {
+ "content": "nld_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256128": {
+ "content": "nno_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256129": {
+ "content": "nob_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256130": {
+ "content": "npi_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256131": {
+ "content": "nso_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256132": {
+ "content": "nus_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256133": {
+ "content": "nya_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256134": {
+ "content": "oci_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256135": {
+ "content": "gaz_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256136": {
+ "content": "ory_Orya",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256137": {
+ "content": "pag_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256138": {
+ "content": "pan_Guru",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256139": {
+ "content": "pap_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256140": {
+ "content": "pol_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256141": {
+ "content": "por_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256142": {
+ "content": "prs_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256143": {
+ "content": "pbt_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256144": {
+ "content": "quy_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256145": {
+ "content": "ron_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256146": {
+ "content": "run_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256147": {
+ "content": "rus_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256148": {
+ "content": "sag_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256149": {
+ "content": "san_Deva",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256150": {
+ "content": "sat_Beng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256151": {
+ "content": "scn_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256152": {
+ "content": "shn_Mymr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256153": {
+ "content": "sin_Sinh",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256154": {
+ "content": "slk_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256155": {
+ "content": "slv_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256156": {
+ "content": "smo_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256157": {
+ "content": "sna_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256158": {
+ "content": "snd_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256159": {
+ "content": "som_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256160": {
+ "content": "sot_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256161": {
+ "content": "spa_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256162": {
+ "content": "als_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256163": {
+ "content": "srd_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256164": {
+ "content": "srp_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256165": {
+ "content": "ssw_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256166": {
+ "content": "sun_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256167": {
+ "content": "swe_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256168": {
+ "content": "swh_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256169": {
+ "content": "szl_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256170": {
+ "content": "tam_Taml",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256171": {
+ "content": "tat_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256172": {
+ "content": "tel_Telu",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256173": {
+ "content": "tgk_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256174": {
+ "content": "tgl_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256175": {
+ "content": "tha_Thai",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256176": {
+ "content": "tir_Ethi",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256177": {
+ "content": "taq_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256178": {
+ "content": "taq_Tfng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256179": {
+ "content": "tpi_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256180": {
+ "content": "tsn_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256181": {
+ "content": "tso_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256182": {
+ "content": "tuk_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256183": {
+ "content": "tum_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256184": {
+ "content": "tur_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256185": {
+ "content": "twi_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256186": {
+ "content": "tzm_Tfng",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256187": {
+ "content": "uig_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256188": {
+ "content": "ukr_Cyrl",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256189": {
+ "content": "umb_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256190": {
+ "content": "urd_Arab",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256191": {
+ "content": "uzn_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256192": {
+ "content": "vec_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256193": {
+ "content": "vie_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256194": {
+ "content": "war_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256195": {
+ "content": "wol_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256196": {
+ "content": "xho_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256197": {
+ "content": "ydd_Hebr",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256198": {
+ "content": "yor_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256199": {
+ "content": "yue_Hant",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256200": {
+ "content": "zho_Hans",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256201": {
+ "content": "zho_Hant",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256202": {
+ "content": "zul_Latn",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "256203": {
+ "content": "<mask>",
+ "lstrip": true,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "additional_special_tokens": [
+ "ace_Arab",
+ "ace_Latn",
+ "acm_Arab",
+ "acq_Arab",
+ "aeb_Arab",
+ "afr_Latn",
+ "ajp_Arab",
+ "aka_Latn",
+ "amh_Ethi",
+ "apc_Arab",
+ "arb_Arab",
+ "ars_Arab",
+ "ary_Arab",
+ "arz_Arab",
+ "asm_Beng",
+ "ast_Latn",
+ "awa_Deva",
+ "ayr_Latn",
+ "azb_Arab",
+ "azj_Latn",
+ "bak_Cyrl",
+ "bam_Latn",
+ "ban_Latn",
+ "bel_Cyrl",
+ "bem_Latn",
+ "ben_Beng",
+ "bho_Deva",
+ "bjn_Arab",
+ "bjn_Latn",
+ "bod_Tibt",
+ "bos_Latn",
+ "bug_Latn",
+ "bul_Cyrl",
+ "cat_Latn",
+ "ceb_Latn",
+ "ces_Latn",
+ "cjk_Latn",
+ "ckb_Arab",
+ "crh_Latn",
+ "cym_Latn",
+ "dan_Latn",
+ "deu_Latn",
+ "dik_Latn",
+ "dyu_Latn",
+ "dzo_Tibt",
+ "ell_Grek",
+ "eng_Latn",
+ "epo_Latn",
+ "est_Latn",
+ "eus_Latn",
+ "ewe_Latn",
+ "fao_Latn",
+ "pes_Arab",
+ "fij_Latn",
+ "fin_Latn",
+ "fon_Latn",
+ "fra_Latn",
+ "fur_Latn",
+ "fuv_Latn",
+ "gla_Latn",
+ "gle_Latn",
+ "glg_Latn",
+ "grn_Latn",
+ "guj_Gujr",
+ "hat_Latn",
+ "hau_Latn",
+ "heb_Hebr",
+ "hin_Deva",
+ "hne_Deva",
+ "hrv_Latn",
+ "hun_Latn",
+ "hye_Armn",
+ "ibo_Latn",
+ "ilo_Latn",
+ "ind_Latn",
+ "isl_Latn",
+ "ita_Latn",
+ "jav_Latn",
+ "jpn_Jpan",
+ "kab_Latn",
+ "kac_Latn",
+ "kam_Latn",
+ "kan_Knda",
+ "kas_Arab",
+ "kas_Deva",
+ "kat_Geor",
+ "knc_Arab",
+ "knc_Latn",
+ "kaz_Cyrl",
+ "kbp_Latn",
+ "kea_Latn",
+ "khm_Khmr",
+ "kik_Latn",
+ "kin_Latn",
+ "kir_Cyrl",
+ "kmb_Latn",
+ "kon_Latn",
+ "kor_Hang",
+ "kmr_Latn",
+ "lao_Laoo",
+ "lvs_Latn",
+ "lij_Latn",
+ "lim_Latn",
+ "lin_Latn",
+ "lit_Latn",
+ "lmo_Latn",
+ "ltg_Latn",
+ "ltz_Latn",
+ "lua_Latn",
+ "lug_Latn",
+ "luo_Latn",
+ "lus_Latn",
+ "mag_Deva",
+ "mai_Deva",
+ "mal_Mlym",
+ "mar_Deva",
+ "min_Latn",
+ "mkd_Cyrl",
+ "plt_Latn",
+ "mlt_Latn",
+ "mni_Beng",
+ "khk_Cyrl",
+ "mos_Latn",
+ "mri_Latn",
+ "zsm_Latn",
+ "mya_Mymr",
+ "nld_Latn",
+ "nno_Latn",
+ "nob_Latn",
+ "npi_Deva",
+ "nso_Latn",
+ "nus_Latn",
+ "nya_Latn",
+ "oci_Latn",
+ "gaz_Latn",
+ "ory_Orya",
+ "pag_Latn",
+ "pan_Guru",
+ "pap_Latn",
+ "pol_Latn",
+ "por_Latn",
+ "prs_Arab",
+ "pbt_Arab",
+ "quy_Latn",
+ "ron_Latn",
+ "run_Latn",
+ "rus_Cyrl",
+ "sag_Latn",
+ "san_Deva",
+ "sat_Beng",
+ "scn_Latn",
+ "shn_Mymr",
+ "sin_Sinh",
+ "slk_Latn",
+ "slv_Latn",
+ "smo_Latn",
+ "sna_Latn",
+ "snd_Arab",
+ "som_Latn",
+ "sot_Latn",
+ "spa_Latn",
+ "als_Latn",
+ "srd_Latn",
+ "srp_Cyrl",
+ "ssw_Latn",
+ "sun_Latn",
+ "swe_Latn",
+ "swh_Latn",
+ "szl_Latn",
+ "tam_Taml",
+ "tat_Cyrl",
+ "tel_Telu",
+ "tgk_Cyrl",
+ "tgl_Latn",
+ "tha_Thai",
+ "tir_Ethi",
+ "taq_Latn",
+ "taq_Tfng",
+ "tpi_Latn",
+ "tsn_Latn",
+ "tso_Latn",
+ "tuk_Latn",
+ "tum_Latn",
+ "tur_Latn",
+ "twi_Latn",
+ "tzm_Tfng",
+ "uig_Arab",
+ "ukr_Cyrl",
+ "umb_Latn",
+ "urd_Arab",
+ "uzn_Latn",
+ "vec_Latn",
+ "vie_Latn",
+ "war_Latn",
+ "wol_Latn",
+ "xho_Latn",
+ "ydd_Hebr",
+ "yor_Latn",
+ "yue_Hant",
+ "zho_Hans",
+ "zho_Hant",
+ "zul_Latn"
+ ],
+ "bos_token": "<s>",
+ "clean_up_tokenization_spaces": true,
+ "cls_token": "<s>",
+ "eos_token": "</s>",
+ "legacy_behaviour": false,
+ "mask_token": "<mask>",
+ "model_max_length": 1024,
+ "pad_token": "<pad>",
+ "sep_token": "</s>",
+ "sp_model_kwargs": {},
+ "src_lang": "eng_Latn",
+ "tgt_lang": null,
+ "tokenizer_class": "NllbTokenizer",
+ "unk_token": "<unk>"
+}
diff --git a/checkpoint-500/trainer_state.json b/checkpoint-500/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..bf2e87f5609391fdf03bbe0eda22c598e975e5f4
--- /dev/null
+++ b/checkpoint-500/trainer_state.json
@@ -0,0 +1,3533 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 0.5001406645619081,
+ "eval_steps": 500,
+ "global_step": 500,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.001000281329123816,
+ "grad_norm": 7.503077030181885,
+ "learning_rate": 0.0001999999450590425,
+ "loss": 3.6513,
+ "step": 1
+ },
+ {
+ "epoch": 0.002000562658247632,
+ "grad_norm": 3.1191189289093018,
+ "learning_rate": 0.00019999978023623033,
+ "loss": 2.8683,
+ "step": 2
+ },
+ {
+ "epoch": 0.003000843987371448,
+ "grad_norm": 1.9282511472702026,
+ "learning_rate": 0.0001999995055317446,
+ "loss": 2.7882,
+ "step": 3
+ },
+ {
+ "epoch": 0.004001125316495264,
+ "grad_norm": 1.726026177406311,
+ "learning_rate": 0.00019999912094588717,
+ "loss": 2.7716,
+ "step": 4
+ },
+ {
+ "epoch": 0.005001406645619081,
+ "grad_norm": 1.4632996320724487,
+ "learning_rate": 0.00019999862647908064,
+ "loss": 2.6869,
+ "step": 5
+ },
+ {
+ "epoch": 0.006001687974742896,
+ "grad_norm": 1.5544543266296387,
+ "learning_rate": 0.00019999802213186834,
+ "loss": 2.6952,
+ "step": 6
+ },
+ {
+ "epoch": 0.007001969303866712,
+ "grad_norm": 1.5888980627059937,
+ "learning_rate": 0.0001999973079049143,
+ "loss": 2.1237,
+ "step": 7
+ },
+ {
+ "epoch": 0.008002250632990529,
+ "grad_norm": 1.8750641345977783,
+ "learning_rate": 0.00019999648379900338,
+ "loss": 2.3376,
+ "step": 8
+ },
+ {
+ "epoch": 0.009002531962114344,
+ "grad_norm": 1.0540648698806763,
+ "learning_rate": 0.0001999955498150411,
+ "loss": 2.4896,
+ "step": 9
+ },
+ {
+ "epoch": 0.010002813291238161,
+ "grad_norm": 1.0269274711608887,
+ "learning_rate": 0.00019999450595405374,
+ "loss": 2.1365,
+ "step": 10
+ },
+ {
+ "epoch": 0.011003094620361977,
+ "grad_norm": 1.0851730108261108,
+ "learning_rate": 0.0001999933522171883,
+ "loss": 2.235,
+ "step": 11
+ },
+ {
+ "epoch": 0.012003375949485792,
+ "grad_norm": 0.927042543888092,
+ "learning_rate": 0.00019999208860571255,
+ "loss": 2.2438,
+ "step": 12
+ },
+ {
+ "epoch": 0.01300365727860961,
+ "grad_norm": 1.3729208707809448,
+ "learning_rate": 0.00019999071512101496,
+ "loss": 2.0845,
+ "step": 13
+ },
+ {
+ "epoch": 0.014003938607733425,
+ "grad_norm": 1.1325910091400146,
+ "learning_rate": 0.00019998923176460474,
+ "loss": 2.0668,
+ "step": 14
+ },
+ {
+ "epoch": 0.01500421993685724,
+ "grad_norm": 0.9290457367897034,
+ "learning_rate": 0.00019998763853811184,
+ "loss": 2.0227,
+ "step": 15
+ },
+ {
+ "epoch": 0.016004501265981057,
+ "grad_norm": 0.942140519618988,
+ "learning_rate": 0.00019998593544328692,
+ "loss": 2.1598,
+ "step": 16
+ },
+ {
+ "epoch": 0.017004782595104875,
+ "grad_norm": 1.096635103225708,
+ "learning_rate": 0.00019998412248200138,
+ "loss": 2.1897,
+ "step": 17
+ },
+ {
+ "epoch": 0.01800506392422869,
+ "grad_norm": 1.1107186079025269,
+ "learning_rate": 0.00019998219965624734,
+ "loss": 2.0546,
+ "step": 18
+ },
+ {
+ "epoch": 0.019005345253352506,
+ "grad_norm": 0.9696593880653381,
+ "learning_rate": 0.0001999801669681376,
+ "loss": 2.0317,
+ "step": 19
+ },
+ {
+ "epoch": 0.020005626582476323,
+ "grad_norm": 0.9394300580024719,
+ "learning_rate": 0.00019997802441990573,
+ "loss": 2.2883,
+ "step": 20
+ },
+ {
+ "epoch": 0.021005907911600136,
+ "grad_norm": 1.08865225315094,
+ "learning_rate": 0.00019997577201390606,
+ "loss": 1.9838,
+ "step": 21
+ },
+ {
+ "epoch": 0.022006189240723954,
+ "grad_norm": 1.0712405443191528,
+ "learning_rate": 0.00019997340975261353,
+ "loss": 2.1177,
+ "step": 22
+ },
+ {
+ "epoch": 0.02300647056984777,
+ "grad_norm": 1.3190314769744873,
+ "learning_rate": 0.00019997093763862383,
+ "loss": 1.9755,
+ "step": 23
+ },
+ {
+ "epoch": 0.024006751898971584,
+ "grad_norm": 1.0659812688827515,
+ "learning_rate": 0.0001999683556746534,
+ "loss": 1.9829,
+ "step": 24
+ },
+ {
+ "epoch": 0.0250070332280954,
+ "grad_norm": 1.1824345588684082,
+ "learning_rate": 0.0001999656638635393,
+ "loss": 2.4219,
+ "step": 25
+ },
+ {
+ "epoch": 0.02600731455721922,
+ "grad_norm": 1.3446214199066162,
+ "learning_rate": 0.0001999628622082394,
+ "loss": 1.9644,
+ "step": 26
+ },
+ {
+ "epoch": 0.027007595886343033,
+ "grad_norm": 1.2527475357055664,
+ "learning_rate": 0.0001999599507118322,
+ "loss": 2.1889,
+ "step": 27
+ },
+ {
+ "epoch": 0.02800787721546685,
+ "grad_norm": 1.4738999605178833,
+ "learning_rate": 0.00019995692937751683,
+ "loss": 2.1949,
+ "step": 28
+ },
+ {
+ "epoch": 0.029008158544590667,
+ "grad_norm": 1.0533576011657715,
+ "learning_rate": 0.0001999537982086133,
+ "loss": 2.1034,
+ "step": 29
+ },
+ {
+ "epoch": 0.03000843987371448,
+ "grad_norm": 1.0343223810195923,
+ "learning_rate": 0.00019995055720856218,
+ "loss": 1.9561,
+ "step": 30
+ },
+ {
+ "epoch": 0.031008721202838298,
+ "grad_norm": 1.1149976253509521,
+ "learning_rate": 0.00019994720638092468,
+ "loss": 2.0981,
+ "step": 31
+ },
+ {
+ "epoch": 0.032009002531962115,
+ "grad_norm": 1.197178840637207,
+ "learning_rate": 0.00019994374572938277,
+ "loss": 2.1587,
+ "step": 32
+ },
+ {
+ "epoch": 0.03300928386108593,
+ "grad_norm": 0.9382303953170776,
+ "learning_rate": 0.00019994017525773913,
+ "loss": 1.869,
+ "step": 33
+ },
+ {
+ "epoch": 0.03400956519020975,
+ "grad_norm": 1.0526461601257324,
+ "learning_rate": 0.00019993649496991705,
+ "loss": 1.9045,
+ "step": 34
+ },
+ {
+ "epoch": 0.03500984651933356,
+ "grad_norm": 0.8510498404502869,
+ "learning_rate": 0.00019993270486996046,
+ "loss": 2.1005,
+ "step": 35
+ },
+ {
+ "epoch": 0.03601012784845738,
+ "grad_norm": 0.9990401268005371,
+ "learning_rate": 0.000199928804962034,
+ "loss": 1.8569,
+ "step": 36
+ },
+ {
+ "epoch": 0.037010409177581194,
+ "grad_norm": 0.9243854284286499,
+ "learning_rate": 0.00019992479525042303,
+ "loss": 1.9666,
+ "step": 37
+ },
+ {
+ "epoch": 0.03801069050670501,
+ "grad_norm": 0.7774227261543274,
+ "learning_rate": 0.00019992067573953342,
+ "loss": 2.0376,
+ "step": 38
+ },
+ {
+ "epoch": 0.03901097183582883,
+ "grad_norm": 0.8114833235740662,
+ "learning_rate": 0.0001999164464338918,
+ "loss": 2.1608,
+ "step": 39
+ },
+ {
+ "epoch": 0.040011253164952645,
+ "grad_norm": 0.8716320395469666,
+ "learning_rate": 0.0001999121073381454,
+ "loss": 2.0743,
+ "step": 40
+ },
+ {
+ "epoch": 0.041011534494076456,
+ "grad_norm": 0.9571239948272705,
+ "learning_rate": 0.0001999076584570621,
+ "loss": 2.0128,
+ "step": 41
+ },
+ {
+ "epoch": 0.04201181582320027,
+ "grad_norm": 1.038691520690918,
+ "learning_rate": 0.00019990309979553045,
+ "loss": 1.976,
+ "step": 42
+ },
+ {
+ "epoch": 0.04301209715232409,
+ "grad_norm": 1.0576292276382446,
+ "learning_rate": 0.00019989843135855958,
+ "loss": 1.94,
+ "step": 43
+ },
+ {
+ "epoch": 0.04401237848144791,
+ "grad_norm": 1.0991204977035522,
+ "learning_rate": 0.00019989365315127922,
+ "loss": 1.9397,
+ "step": 44
+ },
+ {
+ "epoch": 0.045012659810571724,
+ "grad_norm": 0.9268686175346375,
+ "learning_rate": 0.0001998887651789398,
+ "loss": 1.9305,
+ "step": 45
+ },
+ {
+ "epoch": 0.04601294113969554,
+ "grad_norm": 0.8459104299545288,
+ "learning_rate": 0.0001998837674469123,
+ "loss": 1.7941,
+ "step": 46
+ },
+ {
+ "epoch": 0.04701322246881936,
+ "grad_norm": 0.9260527491569519,
+ "learning_rate": 0.00019987865996068833,
+ "loss": 1.8843,
+ "step": 47
+ },
+ {
+ "epoch": 0.04801350379794317,
+ "grad_norm": 0.8370497226715088,
+ "learning_rate": 0.00019987344272588006,
+ "loss": 1.8779,
+ "step": 48
+ },
+ {
+ "epoch": 0.049013785127066986,
+ "grad_norm": 0.9228008389472961,
+ "learning_rate": 0.00019986811574822033,
+ "loss": 2.1713,
+ "step": 49
+ },
+ {
+ "epoch": 0.0500140664561908,
+ "grad_norm": 1.013746738433838,
+ "learning_rate": 0.00019986267903356254,
+ "loss": 2.1443,
+ "step": 50
+ },
+ {
+ "epoch": 0.05101434778531462,
+ "grad_norm": 1.0155737400054932,
+ "learning_rate": 0.0001998571325878806,
+ "loss": 1.9679,
+ "step": 51
+ },
+ {
+ "epoch": 0.05201462911443844,
+ "grad_norm": 0.9591345191001892,
+ "learning_rate": 0.0001998514764172691,
+ "loss": 2.0611,
+ "step": 52
+ },
+ {
+ "epoch": 0.053014910443562255,
+ "grad_norm": 0.9030050039291382,
+ "learning_rate": 0.00019984571052794313,
+ "loss": 1.9698,
+ "step": 53
+ },
+ {
+ "epoch": 0.054015191772686065,
+ "grad_norm": 0.7697799205780029,
+ "learning_rate": 0.00019983983492623833,
+ "loss": 2.0609,
+ "step": 54
+ },
+ {
+ "epoch": 0.05501547310180988,
+ "grad_norm": 0.8806005716323853,
+ "learning_rate": 0.00019983384961861096,
+ "loss": 1.9756,
+ "step": 55
+ },
+ {
+ "epoch": 0.0560157544309337,
+ "grad_norm": 0.9424449801445007,
+ "learning_rate": 0.0001998277546116378,
+ "loss": 2.0913,
+ "step": 56
+ },
+ {
+ "epoch": 0.05701603576005752,
+ "grad_norm": 1.139495849609375,
+ "learning_rate": 0.00019982154991201608,
+ "loss": 2.2524,
+ "step": 57
+ },
+ {
+ "epoch": 0.058016317089181334,
+ "grad_norm": 1.094347357749939,
+ "learning_rate": 0.00019981523552656377,
+ "loss": 1.8501,
+ "step": 58
+ },
+ {
+ "epoch": 0.05901659841830515,
+ "grad_norm": 1.1519278287887573,
+ "learning_rate": 0.00019980881146221914,
+ "loss": 1.9866,
+ "step": 59
+ },
+ {
+ "epoch": 0.06001687974742896,
+ "grad_norm": 1.2018250226974487,
+ "learning_rate": 0.00019980227772604112,
+ "loss": 1.8226,
+ "step": 60
+ },
+ {
+ "epoch": 0.06101716107655278,
+ "grad_norm": 0.9565753936767578,
+ "learning_rate": 0.0001997956343252091,
+ "loss": 1.8434,
+ "step": 61
+ },
+ {
+ "epoch": 0.062017442405676595,
+ "grad_norm": 1.0832768678665161,
+ "learning_rate": 0.00019978888126702296,
+ "loss": 2.1271,
+ "step": 62
+ },
+ {
+ "epoch": 0.06301772373480041,
+ "grad_norm": 0.8973837494850159,
+ "learning_rate": 0.00019978201855890308,
+ "loss": 1.8331,
+ "step": 63
+ },
+ {
+ "epoch": 0.06401800506392423,
+ "grad_norm": 0.8754604458808899,
+ "learning_rate": 0.00019977504620839035,
+ "loss": 2.1379,
+ "step": 64
+ },
+ {
+ "epoch": 0.06501828639304805,
+ "grad_norm": 0.8244839310646057,
+ "learning_rate": 0.00019976796422314615,
+ "loss": 1.8431,
+ "step": 65
+ },
+ {
+ "epoch": 0.06601856772217186,
+ "grad_norm": 0.8213551044464111,
+ "learning_rate": 0.00019976077261095226,
+ "loss": 1.9155,
+ "step": 66
+ },
+ {
+ "epoch": 0.06701884905129568,
+ "grad_norm": 0.9140985608100891,
+ "learning_rate": 0.00019975347137971098,
+ "loss": 2.0651,
+ "step": 67
+ },
+ {
+ "epoch": 0.0680191303804195,
+ "grad_norm": 0.8518921732902527,
+ "learning_rate": 0.00019974606053744503,
+ "loss": 1.8197,
+ "step": 68
+ },
+ {
+ "epoch": 0.06901941170954332,
+ "grad_norm": 0.8397145867347717,
+ "learning_rate": 0.00019973854009229763,
+ "loss": 1.8621,
+ "step": 69
+ },
+ {
+ "epoch": 0.07001969303866712,
+ "grad_norm": 0.8727964162826538,
+ "learning_rate": 0.00019973091005253232,
+ "loss": 1.762,
+ "step": 70
+ },
+ {
+ "epoch": 0.07101997436779094,
+ "grad_norm": 0.9284623265266418,
+ "learning_rate": 0.0001997231704265332,
+ "loss": 1.8675,
+ "step": 71
+ },
+ {
+ "epoch": 0.07202025569691475,
+ "grad_norm": 0.8280015587806702,
+ "learning_rate": 0.00019971532122280464,
+ "loss": 1.931,
+ "step": 72
+ },
+ {
+ "epoch": 0.07302053702603857,
+ "grad_norm": 0.7591394186019897,
+ "learning_rate": 0.0001997073624499716,
+ "loss": 1.8485,
+ "step": 73
+ },
+ {
+ "epoch": 0.07402081835516239,
+ "grad_norm": 0.975128710269928,
+ "learning_rate": 0.0001996992941167792,
+ "loss": 2.0784,
+ "step": 74
+ },
+ {
+ "epoch": 0.0750210996842862,
+ "grad_norm": 0.8034948110580444,
+ "learning_rate": 0.00019969111623209323,
+ "loss": 1.9849,
+ "step": 75
+ },
+ {
+ "epoch": 0.07602138101341002,
+ "grad_norm": 0.8540483713150024,
+ "learning_rate": 0.00019968282880489957,
+ "loss": 1.7832,
+ "step": 76
+ },
+ {
+ "epoch": 0.07702166234253384,
+ "grad_norm": 0.8181695342063904,
+ "learning_rate": 0.00019967443184430467,
+ "loss": 1.944,
+ "step": 77
+ },
+ {
+ "epoch": 0.07802194367165766,
+ "grad_norm": 0.8446747064590454,
+ "learning_rate": 0.0001996659253595353,
+ "loss": 1.8508,
+ "step": 78
+ },
+ {
+ "epoch": 0.07902222500078147,
+ "grad_norm": 0.8280364871025085,
+ "learning_rate": 0.0001996573093599385,
+ "loss": 1.843,
+ "step": 79
+ },
+ {
+ "epoch": 0.08002250632990529,
+ "grad_norm": 0.8016006350517273,
+ "learning_rate": 0.00019964858385498172,
+ "loss": 1.9368,
+ "step": 80
+ },
+ {
+ "epoch": 0.08102278765902911,
+ "grad_norm": 0.8450536131858826,
+ "learning_rate": 0.00019963974885425266,
+ "loss": 1.9736,
+ "step": 81
+ },
+ {
+ "epoch": 0.08202306898815291,
+ "grad_norm": 0.9172171950340271,
+ "learning_rate": 0.00019963080436745945,
+ "loss": 1.9382,
+ "step": 82
+ },
+ {
+ "epoch": 0.08302335031727673,
+ "grad_norm": 0.8581916689872742,
+ "learning_rate": 0.00019962175040443044,
+ "loss": 2.224,
+ "step": 83
+ },
+ {
+ "epoch": 0.08402363164640055,
+ "grad_norm": 0.9350367188453674,
+ "learning_rate": 0.0001996125869751143,
+ "loss": 2.0519,
+ "step": 84
+ },
+ {
+ "epoch": 0.08502391297552436,
+ "grad_norm": 0.9276247620582581,
+ "learning_rate": 0.00019960331408957997,
+ "loss": 1.9657,
+ "step": 85
+ },
+ {
+ "epoch": 0.08602419430464818,
+ "grad_norm": 0.871574342250824,
+ "learning_rate": 0.00019959393175801671,
+ "loss": 1.9399,
+ "step": 86
+ },
+ {
+ "epoch": 0.087024475633772,
+ "grad_norm": 1.0662888288497925,
+ "learning_rate": 0.00019958443999073397,
+ "loss": 1.9089,
+ "step": 87
+ },
+ {
+ "epoch": 0.08802475696289581,
+ "grad_norm": 0.8258713483810425,
+ "learning_rate": 0.00019957483879816151,
+ "loss": 1.839,
+ "step": 88
+ },
+ {
+ "epoch": 0.08902503829201963,
+ "grad_norm": 0.8154664039611816,
+ "learning_rate": 0.00019956512819084928,
+ "loss": 1.8409,
+ "step": 89
+ },
+ {
+ "epoch": 0.09002531962114345,
+ "grad_norm": 0.8584638833999634,
+ "learning_rate": 0.00019955530817946748,
+ "loss": 1.9521,
+ "step": 90
+ },
+ {
+ "epoch": 0.09102560095026727,
+ "grad_norm": 0.7917523384094238,
+ "learning_rate": 0.00019954537877480655,
+ "loss": 1.9495,
+ "step": 91
+ },
+ {
+ "epoch": 0.09202588227939108,
+ "grad_norm": 1.0129039287567139,
+ "learning_rate": 0.00019953533998777706,
+ "loss": 1.949,
+ "step": 92
+ },
+ {
+ "epoch": 0.0930261636085149,
+ "grad_norm": 0.8677986264228821,
+ "learning_rate": 0.00019952519182940993,
+ "loss": 1.7875,
+ "step": 93
+ },
+ {
+ "epoch": 0.09402644493763872,
+ "grad_norm": 0.8848614692687988,
+ "learning_rate": 0.00019951493431085603,
+ "loss": 2.0675,
+ "step": 94
+ },
+ {
+ "epoch": 0.09502672626676252,
+ "grad_norm": 0.9936463832855225,
+ "learning_rate": 0.00019950456744338658,
+ "loss": 1.6761,
+ "step": 95
+ },
+ {
+ "epoch": 0.09602700759588634,
+ "grad_norm": 1.0520148277282715,
+ "learning_rate": 0.00019949409123839288,
+ "loss": 2.081,
+ "step": 96
+ },
+ {
+ "epoch": 0.09702728892501016,
+ "grad_norm": 0.8061773180961609,
+ "learning_rate": 0.00019948350570738642,
+ "loss": 1.7281,
+ "step": 97
+ },
+ {
+ "epoch": 0.09802757025413397,
+ "grad_norm": 0.7642756104469299,
+ "learning_rate": 0.0001994728108619987,
+ "loss": 2.0032,
+ "step": 98
+ },
+ {
+ "epoch": 0.09902785158325779,
+ "grad_norm": 0.8541550040245056,
+ "learning_rate": 0.0001994620067139815,
+ "loss": 2.1136,
+ "step": 99
+ },
+ {
+ "epoch": 0.1000281329123816,
+ "grad_norm": 0.7868679761886597,
+ "learning_rate": 0.00019945109327520658,
+ "loss": 1.8695,
+ "step": 100
+ },
+ {
+ "epoch": 0.10102841424150542,
+ "grad_norm": 0.8776901364326477,
+ "learning_rate": 0.00019944007055766586,
+ "loss": 1.9786,
+ "step": 101
+ },
+ {
+ "epoch": 0.10202869557062924,
+ "grad_norm": 0.9013833999633789,
+ "learning_rate": 0.00019942893857347128,
+ "loss": 2.1466,
+ "step": 102
+ },
+ {
+ "epoch": 0.10302897689975306,
+ "grad_norm": 0.957558274269104,
+ "learning_rate": 0.00019941769733485494,
+ "loss": 2.0473,
+ "step": 103
+ },
+ {
+ "epoch": 0.10402925822887688,
+ "grad_norm": 0.8921108841896057,
+ "learning_rate": 0.00019940634685416888,
+ "loss": 1.7882,
+ "step": 104
+ },
+ {
+ "epoch": 0.10502953955800069,
+ "grad_norm": 0.896019697189331,
+ "learning_rate": 0.00019939488714388524,
+ "loss": 1.8811,
+ "step": 105
+ },
+ {
+ "epoch": 0.10602982088712451,
+ "grad_norm": 0.8792067766189575,
+ "learning_rate": 0.00019938331821659614,
+ "loss": 1.8624,
+ "step": 106
+ },
+ {
+ "epoch": 0.10703010221624833,
+ "grad_norm": 0.8739930391311646,
+ "learning_rate": 0.0001993716400850138,
+ "loss": 1.8105,
+ "step": 107
+ },
+ {
+ "epoch": 0.10803038354537213,
+ "grad_norm": 0.7678424715995789,
+ "learning_rate": 0.0001993598527619703,
+ "loss": 1.8772,
+ "step": 108
+ },
+ {
+ "epoch": 0.10903066487449595,
+ "grad_norm": 0.8718745112419128,
+ "learning_rate": 0.00019934795626041783,
+ "loss": 1.8236,
+ "step": 109
+ },
+ {
+ "epoch": 0.11003094620361976,
+ "grad_norm": 0.8467247486114502,
+ "learning_rate": 0.0001993359505934285,
+ "loss": 1.8188,
+ "step": 110
+ },
+ {
+ "epoch": 0.11103122753274358,
+ "grad_norm": 0.8685783743858337,
+ "learning_rate": 0.00019932383577419432,
+ "loss": 2.0775,
+ "step": 111
+ },
+ {
+ "epoch": 0.1120315088618674,
+ "grad_norm": 0.7799698710441589,
+ "learning_rate": 0.0001993116118160273,
+ "loss": 1.6489,
+ "step": 112
+ },
+ {
+ "epoch": 0.11303179019099122,
+ "grad_norm": 0.7900094389915466,
+ "learning_rate": 0.00019929927873235938,
+ "loss": 1.8332,
+ "step": 113
+ },
+ {
+ "epoch": 0.11403207152011503,
+ "grad_norm": 0.9433258771896362,
+ "learning_rate": 0.00019928683653674237,
+ "loss": 1.9331,
+ "step": 114
+ },
+ {
+ "epoch": 0.11503235284923885,
+ "grad_norm": 0.8861056566238403,
+ "learning_rate": 0.00019927428524284805,
+ "loss": 1.9135,
+ "step": 115
+ },
+ {
+ "epoch": 0.11603263417836267,
+ "grad_norm": 0.8566756844520569,
+ "learning_rate": 0.00019926162486446792,
+ "loss": 1.9874,
+ "step": 116
+ },
+ {
+ "epoch": 0.11703291550748648,
+ "grad_norm": 0.6897929310798645,
+ "learning_rate": 0.0001992488554155135,
+ "loss": 1.946,
+ "step": 117
+ },
+ {
+ "epoch": 0.1180331968366103,
+ "grad_norm": 0.7807729244232178,
+ "learning_rate": 0.00019923597691001615,
+ "loss": 1.8127,
+ "step": 118
+ },
+ {
+ "epoch": 0.11903347816573412,
+ "grad_norm": 0.7572523355484009,
+ "learning_rate": 0.0001992229893621269,
+ "loss": 1.7768,
+ "step": 119
+ },
+ {
+ "epoch": 0.12003375949485792,
+ "grad_norm": 0.7393172979354858,
+ "learning_rate": 0.00019920989278611687,
+ "loss": 1.894,
+ "step": 120
+ },
+ {
+ "epoch": 0.12103404082398174,
+ "grad_norm": 0.866576611995697,
+ "learning_rate": 0.0001991966871963767,
+ "loss": 1.9285,
+ "step": 121
+ },
+ {
+ "epoch": 0.12203432215310556,
+ "grad_norm": 0.7326533794403076,
+ "learning_rate": 0.000199183372607417,
+ "loss": 1.9309,
+ "step": 122
+ },
+ {
+ "epoch": 0.12303460348222937,
+ "grad_norm": 0.7655537724494934,
+ "learning_rate": 0.0001991699490338681,
+ "loss": 2.1145,
+ "step": 123
+ },
+ {
+ "epoch": 0.12403488481135319,
+ "grad_norm": 0.9714633226394653,
+ "learning_rate": 0.00019915641649048005,
+ "loss": 2.0341,
+ "step": 124
+ },
+ {
+ "epoch": 0.12503516614047702,
+ "grad_norm": 0.8542420864105225,
+ "learning_rate": 0.0001991427749921227,
+ "loss": 2.1426,
+ "step": 125
+ },
+ {
+ "epoch": 0.12603544746960083,
+ "grad_norm": 0.8286274671554565,
+ "learning_rate": 0.00019912902455378556,
+ "loss": 1.8452,
+ "step": 126
+ },
+ {
+ "epoch": 0.12703572879872463,
+ "grad_norm": 0.8823768496513367,
+ "learning_rate": 0.00019911516519057788,
+ "loss": 1.8651,
+ "step": 127
+ },
+ {
+ "epoch": 0.12803601012784846,
+ "grad_norm": 0.7301567196846008,
+ "learning_rate": 0.00019910119691772863,
+ "loss": 1.7776,
+ "step": 128
+ },
+ {
+ "epoch": 0.12903629145697226,
+ "grad_norm": 0.8402552604675293,
+ "learning_rate": 0.00019908711975058637,
+ "loss": 1.8617,
+ "step": 129
+ },
+ {
+ "epoch": 0.1300365727860961,
+ "grad_norm": 0.814500093460083,
+ "learning_rate": 0.0001990729337046194,
+ "loss": 1.9156,
+ "step": 130
+ },
+ {
+ "epoch": 0.1310368541152199,
+ "grad_norm": 0.8262699246406555,
+ "learning_rate": 0.0001990586387954156,
+ "loss": 1.8659,
+ "step": 131
+ },
+ {
+ "epoch": 0.13203713544434373,
+ "grad_norm": 0.8846324682235718,
+ "learning_rate": 0.00019904423503868247,
+ "loss": 2.043,
+ "step": 132
+ },
+ {
+ "epoch": 0.13303741677346753,
+ "grad_norm": 0.8757227659225464,
+ "learning_rate": 0.00019902972245024715,
+ "loss": 1.9217,
+ "step": 133
+ },
+ {
+ "epoch": 0.13403769810259136,
+ "grad_norm": 0.8476879596710205,
+ "learning_rate": 0.00019901510104605637,
+ "loss": 1.8892,
+ "step": 134
+ },
+ {
+ "epoch": 0.13503797943171517,
+ "grad_norm": 0.7707583904266357,
+ "learning_rate": 0.00019900037084217637,
+ "loss": 1.787,
+ "step": 135
+ },
+ {
+ "epoch": 0.136038260760839,
+ "grad_norm": 0.7389562129974365,
+ "learning_rate": 0.00019898553185479303,
+ "loss": 1.5854,
+ "step": 136
+ },
+ {
+ "epoch": 0.1370385420899628,
+ "grad_norm": 0.7331375479698181,
+ "learning_rate": 0.00019897058410021167,
+ "loss": 1.997,
+ "step": 137
+ },
+ {
+ "epoch": 0.13803882341908663,
+ "grad_norm": 0.7219388484954834,
+ "learning_rate": 0.00019895552759485722,
+ "loss": 1.8337,
+ "step": 138
+ },
+ {
+ "epoch": 0.13903910474821043,
+ "grad_norm": 0.8535702228546143,
+ "learning_rate": 0.00019894036235527395,
+ "loss": 1.7818,
+ "step": 139
+ },
+ {
+ "epoch": 0.14003938607733424,
+ "grad_norm": 0.7627841830253601,
+ "learning_rate": 0.00019892508839812584,
+ "loss": 1.8173,
+ "step": 140
+ },
+ {
+ "epoch": 0.14103966740645807,
+ "grad_norm": 0.8397619724273682,
+ "learning_rate": 0.00019890970574019617,
+ "loss": 1.8735,
+ "step": 141
+ },
+ {
+ "epoch": 0.14203994873558187,
+ "grad_norm": 0.8093482851982117,
+ "learning_rate": 0.00019889421439838763,
+ "loss": 1.9918,
+ "step": 142
+ },
+ {
+ "epoch": 0.1430402300647057,
+ "grad_norm": 0.8853684067726135,
+ "learning_rate": 0.00019887861438972246,
+ "loss": 1.5825,
+ "step": 143
+ },
+ {
+ "epoch": 0.1440405113938295,
+ "grad_norm": 0.7413788437843323,
+ "learning_rate": 0.00019886290573134228,
+ "loss": 1.9068,
+ "step": 144
+ },
+ {
+ "epoch": 0.14504079272295334,
+ "grad_norm": 0.7924477458000183,
+ "learning_rate": 0.000198847088440508,
+ "loss": 1.8582,
+ "step": 145
+ },
+ {
+ "epoch": 0.14604107405207714,
+ "grad_norm": 0.8679131865501404,
+ "learning_rate": 0.0001988311625346,
+ "loss": 1.7104,
+ "step": 146
+ },
+ {
+ "epoch": 0.14704135538120097,
+ "grad_norm": 0.7480150461196899,
+ "learning_rate": 0.00019881512803111796,
+ "loss": 1.7288,
+ "step": 147
+ },
+ {
+ "epoch": 0.14804163671032478,
+ "grad_norm": 0.8382390737533569,
+ "learning_rate": 0.00019879898494768093,
+ "loss": 1.8004,
+ "step": 148
+ },
+ {
+ "epoch": 0.1490419180394486,
+ "grad_norm": 0.7360037565231323,
+ "learning_rate": 0.00019878273330202717,
+ "loss": 1.85,
+ "step": 149
+ },
+ {
+ "epoch": 0.1500421993685724,
+ "grad_norm": 0.9644019603729248,
+ "learning_rate": 0.00019876637311201433,
+ "loss": 2.065,
+ "step": 150
+ },
+ {
+ "epoch": 0.15104248069769624,
+ "grad_norm": 0.8116248250007629,
+ "learning_rate": 0.00019874990439561934,
+ "loss": 1.702,
+ "step": 151
+ },
+ {
+ "epoch": 0.15204276202682004,
+ "grad_norm": 0.9301722645759583,
+ "learning_rate": 0.0001987333271709383,
+ "loss": 1.8089,
+ "step": 152
+ },
+ {
+ "epoch": 0.15304304335594385,
+ "grad_norm": 0.7991555333137512,
+ "learning_rate": 0.00019871664145618657,
+ "loss": 1.8227,
+ "step": 153
+ },
+ {
+ "epoch": 0.15404332468506768,
+ "grad_norm": 0.8676092028617859,
+ "learning_rate": 0.00019869984726969878,
+ "loss": 1.7253,
+ "step": 154
+ },
+ {
+ "epoch": 0.15504360601419148,
+ "grad_norm": 0.8022972941398621,
+ "learning_rate": 0.00019868294462992866,
+ "loss": 1.8766,
+ "step": 155
+ },
+ {
+ "epoch": 0.1560438873433153,
+ "grad_norm": 1.128886103630066,
+ "learning_rate": 0.00019866593355544922,
+ "loss": 2.0197,
+ "step": 156
+ },
+ {
+ "epoch": 0.15704416867243912,
+ "grad_norm": 0.7420483827590942,
+ "learning_rate": 0.00019864881406495246,
+ "loss": 1.8825,
+ "step": 157
+ },
+ {
+ "epoch": 0.15804445000156295,
+ "grad_norm": 0.7797536849975586,
+ "learning_rate": 0.00019863158617724967,
+ "loss": 1.8892,
+ "step": 158
+ },
+ {
+ "epoch": 0.15904473133068675,
+ "grad_norm": 0.6859965324401855,
+ "learning_rate": 0.00019861424991127115,
+ "loss": 1.8424,
+ "step": 159
+ },
+ {
+ "epoch": 0.16004501265981058,
+ "grad_norm": 0.8115108609199524,
+ "learning_rate": 0.00019859680528606637,
+ "loss": 1.8394,
+ "step": 160
+ },
+ {
+ "epoch": 0.16104529398893438,
+ "grad_norm": 0.9756322503089905,
+ "learning_rate": 0.00019857925232080373,
+ "loss": 1.726,
+ "step": 161
+ },
+ {
+ "epoch": 0.16204557531805822,
+ "grad_norm": 0.8894350528717041,
+ "learning_rate": 0.00019856159103477086,
+ "loss": 1.8893,
+ "step": 162
+ },
+ {
+ "epoch": 0.16304585664718202,
+ "grad_norm": 0.8075819611549377,
+ "learning_rate": 0.00019854382144737426,
+ "loss": 1.6596,
+ "step": 163
+ },
+ {
+ "epoch": 0.16404613797630582,
+ "grad_norm": 0.8861923813819885,
+ "learning_rate": 0.00019852594357813952,
+ "loss": 1.9352,
+ "step": 164
+ },
+ {
+ "epoch": 0.16504641930542965,
+ "grad_norm": 0.8511936068534851,
+ "learning_rate": 0.00019850795744671116,
+ "loss": 1.9416,
+ "step": 165
+ },
+ {
+ "epoch": 0.16604670063455346,
+ "grad_norm": 0.9425658583641052,
+ "learning_rate": 0.0001984898630728527,
+ "loss": 1.9081,
+ "step": 166
+ },
+ {
+ "epoch": 0.1670469819636773,
+ "grad_norm": 0.7502055168151855,
+ "learning_rate": 0.0001984716604764466,
+ "loss": 1.703,
+ "step": 167
+ },
+ {
+ "epoch": 0.1680472632928011,
+ "grad_norm": 0.9135978817939758,
+ "learning_rate": 0.0001984533496774942,
+ "loss": 1.7641,
+ "step": 168
+ },
+ {
+ "epoch": 0.16904754462192492,
+ "grad_norm": 0.7768126726150513,
+ "learning_rate": 0.0001984349306961158,
+ "loss": 1.7053,
+ "step": 169
+ },
+ {
+ "epoch": 0.17004782595104873,
+ "grad_norm": 0.8106538653373718,
+ "learning_rate": 0.00019841640355255043,
+ "loss": 1.8646,
+ "step": 170
+ },
+ {
+ "epoch": 0.17104810728017256,
+ "grad_norm": 0.7872330546379089,
+ "learning_rate": 0.00019839776826715614,
+ "loss": 1.7814,
+ "step": 171
+ },
+ {
+ "epoch": 0.17204838860929636,
+ "grad_norm": 0.869532585144043,
+ "learning_rate": 0.00019837902486040978,
+ "loss": 1.7812,
+ "step": 172
+ },
+ {
+ "epoch": 0.1730486699384202,
+ "grad_norm": 1.015028715133667,
+ "learning_rate": 0.0001983601733529069,
+ "loss": 1.9432,
+ "step": 173
+ },
+ {
+ "epoch": 0.174048951267544,
+ "grad_norm": 0.800183117389679,
+ "learning_rate": 0.00019834121376536187,
+ "loss": 1.758,
+ "step": 174
+ },
+ {
+ "epoch": 0.17504923259666783,
+ "grad_norm": 0.7427104711532593,
+ "learning_rate": 0.00019832214611860793,
+ "loss": 1.6476,
+ "step": 175
+ },
+ {
+ "epoch": 0.17604951392579163,
+ "grad_norm": 0.8289130926132202,
+ "learning_rate": 0.00019830297043359692,
+ "loss": 1.7702,
+ "step": 176
+ },
+ {
+ "epoch": 0.17704979525491543,
+ "grad_norm": 0.8298771977424622,
+ "learning_rate": 0.00019828368673139947,
+ "loss": 1.7515,
+ "step": 177
+ },
+ {
+ "epoch": 0.17805007658403926,
+ "grad_norm": 0.7602815628051758,
+ "learning_rate": 0.0001982642950332049,
+ "loss": 1.7545,
+ "step": 178
+ },
+ {
+ "epoch": 0.17905035791316307,
+ "grad_norm": 0.8110321164131165,
+ "learning_rate": 0.00019824479536032112,
+ "loss": 2.2604,
+ "step": 179
+ },
+ {
+ "epoch": 0.1800506392422869,
+ "grad_norm": 0.882273256778717,
+ "learning_rate": 0.0001982251877341748,
+ "loss": 1.8133,
+ "step": 180
+ },
+ {
+ "epoch": 0.1810509205714107,
+ "grad_norm": 0.9015639424324036,
+ "learning_rate": 0.00019820547217631117,
+ "loss": 1.7282,
+ "step": 181
+ },
+ {
+ "epoch": 0.18205120190053453,
+ "grad_norm": 0.9231659173965454,
+ "learning_rate": 0.00019818564870839405,
+ "loss": 1.9094,
+ "step": 182
+ },
+ {
+ "epoch": 0.18305148322965833,
+ "grad_norm": 0.8110967874526978,
+ "learning_rate": 0.00019816571735220583,
+ "loss": 1.886,
+ "step": 183
+ },
+ {
+ "epoch": 0.18405176455878217,
+ "grad_norm": 0.7670036554336548,
+ "learning_rate": 0.00019814567812964748,
+ "loss": 1.9895,
+ "step": 184
+ },
+ {
+ "epoch": 0.18505204588790597,
+ "grad_norm": 0.7955975532531738,
+ "learning_rate": 0.00019812553106273847,
+ "loss": 1.8127,
+ "step": 185
+ },
+ {
+ "epoch": 0.1860523272170298,
+ "grad_norm": 0.8790062665939331,
+ "learning_rate": 0.00019810527617361681,
+ "loss": 1.8899,
+ "step": 186
+ },
+ {
+ "epoch": 0.1870526085461536,
+ "grad_norm": 0.8818586468696594,
+ "learning_rate": 0.00019808491348453894,
+ "loss": 1.7707,
+ "step": 187
+ },
+ {
+ "epoch": 0.18805288987527743,
+ "grad_norm": 0.746442437171936,
+ "learning_rate": 0.00019806444301787978,
+ "loss": 1.7281,
+ "step": 188
+ },
+ {
+ "epoch": 0.18905317120440124,
+ "grad_norm": 0.7786905169487,
+ "learning_rate": 0.0001980438647961327,
+ "loss": 1.7317,
+ "step": 189
+ },
+ {
+ "epoch": 0.19005345253352504,
+ "grad_norm": 0.9338862299919128,
+ "learning_rate": 0.00019802317884190935,
+ "loss": 1.9548,
+ "step": 190
+ },
+ {
+ "epoch": 0.19105373386264887,
+ "grad_norm": 0.7416581511497498,
+ "learning_rate": 0.00019800238517793996,
+ "loss": 1.8601,
+ "step": 191
+ },
+ {
+ "epoch": 0.19205401519177268,
+ "grad_norm": 0.6782898902893066,
+ "learning_rate": 0.00019798148382707296,
+ "loss": 1.8477,
+ "step": 192
+ },
+ {
+ "epoch": 0.1930542965208965,
+ "grad_norm": 0.7389237880706787,
+ "learning_rate": 0.00019796047481227515,
+ "loss": 1.7749,
+ "step": 193
+ },
+ {
+ "epoch": 0.1940545778500203,
+ "grad_norm": 0.9711095094680786,
+ "learning_rate": 0.00019793935815663163,
+ "loss": 2.0899,
+ "step": 194
+ },
+ {
+ "epoch": 0.19505485917914414,
+ "grad_norm": 0.7949391007423401,
+ "learning_rate": 0.00019791813388334581,
+ "loss": 1.8778,
+ "step": 195
+ },
+ {
+ "epoch": 0.19605514050826794,
+ "grad_norm": 0.8871057033538818,
+ "learning_rate": 0.00019789680201573933,
+ "loss": 1.7511,
+ "step": 196
+ },
+ {
+ "epoch": 0.19705542183739178,
+ "grad_norm": 0.8664624094963074,
+ "learning_rate": 0.00019787536257725202,
+ "loss": 1.7232,
+ "step": 197
+ },
+ {
+ "epoch": 0.19805570316651558,
+ "grad_norm": 0.871658980846405,
+ "learning_rate": 0.00019785381559144196,
+ "loss": 1.7987,
+ "step": 198
+ },
+ {
+ "epoch": 0.1990559844956394,
+ "grad_norm": 0.7748361229896545,
+ "learning_rate": 0.00019783216108198542,
+ "loss": 1.9239,
+ "step": 199
+ },
+ {
+ "epoch": 0.2000562658247632,
+ "grad_norm": 0.9393408298492432,
+ "learning_rate": 0.00019781039907267677,
+ "loss": 2.0936,
+ "step": 200
+ },
+ {
+ "epoch": 0.20105654715388704,
+ "grad_norm": 0.8519601225852966,
+ "learning_rate": 0.00019778852958742853,
+ "loss": 1.9108,
+ "step": 201
+ },
+ {
+ "epoch": 0.20205682848301085,
+ "grad_norm": 0.8464863300323486,
+ "learning_rate": 0.00019776655265027127,
+ "loss": 1.897,
+ "step": 202
+ },
+ {
+ "epoch": 0.20305710981213465,
+ "grad_norm": 0.8933351635932922,
+ "learning_rate": 0.00019774446828535371,
+ "loss": 1.8204,
+ "step": 203
+ },
+ {
+ "epoch": 0.20405739114125848,
+ "grad_norm": 0.8305785059928894,
+ "learning_rate": 0.00019772227651694256,
+ "loss": 1.9135,
+ "step": 204
+ },
+ {
+ "epoch": 0.20505767247038229,
+ "grad_norm": 0.8117037415504456,
+ "learning_rate": 0.00019769997736942258,
+ "loss": 1.7585,
+ "step": 205
+ },
+ {
+ "epoch": 0.20605795379950612,
+ "grad_norm": 0.7570348381996155,
+ "learning_rate": 0.00019767757086729647,
+ "loss": 1.8373,
+ "step": 206
+ },
+ {
+ "epoch": 0.20705823512862992,
+ "grad_norm": 0.9291234016418457,
+ "learning_rate": 0.00019765505703518496,
+ "loss": 1.7774,
+ "step": 207
+ },
+ {
+ "epoch": 0.20805851645775375,
+ "grad_norm": 0.8211004137992859,
+ "learning_rate": 0.00019763243589782662,
+ "loss": 1.8766,
+ "step": 208
+ },
+ {
+ "epoch": 0.20905879778687755,
+ "grad_norm": 0.6625431180000305,
+ "learning_rate": 0.00019760970748007803,
+ "loss": 1.628,
+ "step": 209
+ },
+ {
+ "epoch": 0.21005907911600138,
+ "grad_norm": 0.7974782586097717,
+ "learning_rate": 0.0001975868718069136,
+ "loss": 1.6896,
+ "step": 210
+ },
+ {
+ "epoch": 0.2110593604451252,
+ "grad_norm": 0.8364912867546082,
+ "learning_rate": 0.00019756392890342563,
+ "loss": 1.7492,
+ "step": 211
+ },
+ {
+ "epoch": 0.21205964177424902,
+ "grad_norm": 0.8730652332305908,
+ "learning_rate": 0.00019754087879482422,
+ "loss": 1.8295,
+ "step": 212
+ },
+ {
+ "epoch": 0.21305992310337282,
+ "grad_norm": 0.7532863020896912,
+ "learning_rate": 0.00019751772150643722,
+ "loss": 1.8309,
+ "step": 213
+ },
+ {
+ "epoch": 0.21406020443249665,
+ "grad_norm": 0.7375178933143616,
+ "learning_rate": 0.00019749445706371038,
+ "loss": 1.7854,
+ "step": 214
+ },
+ {
+ "epoch": 0.21506048576162046,
+ "grad_norm": 0.7524377703666687,
+ "learning_rate": 0.00019747108549220702,
+ "loss": 1.7683,
+ "step": 215
+ },
+ {
+ "epoch": 0.21606076709074426,
+ "grad_norm": 0.7331809997558594,
+ "learning_rate": 0.00019744760681760832,
+ "loss": 1.7103,
+ "step": 216
+ },
+ {
+ "epoch": 0.2170610484198681,
+ "grad_norm": 0.8083691596984863,
+ "learning_rate": 0.00019742402106571314,
+ "loss": 1.674,
+ "step": 217
+ },
+ {
+ "epoch": 0.2180613297489919,
+ "grad_norm": 0.8524570465087891,
+ "learning_rate": 0.00019740032826243788,
+ "loss": 1.7227,
+ "step": 218
+ },
+ {
+ "epoch": 0.21906161107811573,
+ "grad_norm": 0.7676658630371094,
+ "learning_rate": 0.0001973765284338167,
+ "loss": 1.8561,
+ "step": 219
+ },
+ {
+ "epoch": 0.22006189240723953,
+ "grad_norm": 0.7858710289001465,
+ "learning_rate": 0.00019735262160600127,
+ "loss": 1.7796,
+ "step": 220
+ },
+ {
+ "epoch": 0.22106217373636336,
+ "grad_norm": 0.7587497234344482,
+ "learning_rate": 0.00019732860780526088,
+ "loss": 1.9271,
+ "step": 221
+ },
+ {
+ "epoch": 0.22206245506548716,
+ "grad_norm": 0.8084688186645508,
+ "learning_rate": 0.00019730448705798239,
+ "loss": 1.8176,
+ "step": 222
+ },
+ {
+ "epoch": 0.223062736394611,
+ "grad_norm": 0.6736906170845032,
+ "learning_rate": 0.00019728025939067008,
+ "loss": 1.6288,
+ "step": 223
+ },
+ {
+ "epoch": 0.2240630177237348,
+ "grad_norm": 0.7483925819396973,
+ "learning_rate": 0.00019725592482994583,
+ "loss": 1.8363,
+ "step": 224
+ },
+ {
+ "epoch": 0.22506329905285863,
+ "grad_norm": 1.7995796203613281,
+ "learning_rate": 0.00019723148340254892,
+ "loss": 1.9072,
+ "step": 225
+ },
+ {
+ "epoch": 0.22606358038198243,
+ "grad_norm": 0.8028881549835205,
+ "learning_rate": 0.00019720693513533598,
+ "loss": 1.9021,
+ "step": 226
+ },
+ {
+ "epoch": 0.22706386171110624,
+ "grad_norm": 0.9853909015655518,
+ "learning_rate": 0.00019718228005528122,
+ "loss": 2.0159,
+ "step": 227
+ },
+ {
+ "epoch": 0.22806414304023007,
+ "grad_norm": 0.7784947156906128,
+ "learning_rate": 0.00019715751818947603,
+ "loss": 1.7816,
+ "step": 228
+ },
+ {
+ "epoch": 0.22906442436935387,
+ "grad_norm": 0.7447614669799805,
+ "learning_rate": 0.0001971326495651293,
+ "loss": 1.654,
+ "step": 229
+ },
+ {
+ "epoch": 0.2300647056984777,
+ "grad_norm": 0.8673064112663269,
+ "learning_rate": 0.00019710767420956705,
+ "loss": 2.0049,
+ "step": 230
+ },
+ {
+ "epoch": 0.2310649870276015,
+ "grad_norm": 0.8207747936248779,
+ "learning_rate": 0.0001970825921502328,
+ "loss": 1.9388,
+ "step": 231
+ },
+ {
+ "epoch": 0.23206526835672533,
+ "grad_norm": 0.742266058921814,
+ "learning_rate": 0.0001970574034146871,
+ "loss": 1.7658,
+ "step": 232
+ },
+ {
+ "epoch": 0.23306554968584914,
+ "grad_norm": 0.9097973704338074,
+ "learning_rate": 0.00019703210803060782,
+ "loss": 1.8023,
+ "step": 233
+ },
+ {
+ "epoch": 0.23406583101497297,
+ "grad_norm": 0.7512438297271729,
+ "learning_rate": 0.00019700670602579008,
+ "loss": 1.8551,
+ "step": 234
+ },
+ {
+ "epoch": 0.23506611234409677,
+ "grad_norm": 0.8303943872451782,
+ "learning_rate": 0.00019698119742814606,
+ "loss": 1.7723,
+ "step": 235
+ },
+ {
+ "epoch": 0.2360663936732206,
+ "grad_norm": 0.9195139408111572,
+ "learning_rate": 0.00019695558226570507,
+ "loss": 1.6426,
+ "step": 236
+ },
+ {
+ "epoch": 0.2370666750023444,
+ "grad_norm": 0.7734714150428772,
+ "learning_rate": 0.00019692986056661356,
+ "loss": 1.7798,
+ "step": 237
+ },
+ {
+ "epoch": 0.23806695633146824,
+ "grad_norm": 0.8759648203849792,
+ "learning_rate": 0.00019690403235913504,
+ "loss": 1.6465,
+ "step": 238
+ },
+ {
+ "epoch": 0.23906723766059204,
+ "grad_norm": 0.7688003778457642,
+ "learning_rate": 0.00019687809767165,
+ "loss": 2.0092,
+ "step": 239
+ },
+ {
+ "epoch": 0.24006751898971584,
+ "grad_norm": 0.7398790121078491,
+ "learning_rate": 0.000196852056532656,
+ "loss": 1.8176,
+ "step": 240
+ },
+ {
+ "epoch": 0.24106780031883968,
+ "grad_norm": 0.8921257853507996,
+ "learning_rate": 0.00019682590897076752,
+ "loss": 1.7387,
+ "step": 241
+ },
+ {
+ "epoch": 0.24206808164796348,
+ "grad_norm": 0.7939002513885498,
+ "learning_rate": 0.00019679965501471608,
+ "loss": 1.9417,
+ "step": 242
+ },
+ {
+ "epoch": 0.2430683629770873,
+ "grad_norm": 0.7798025608062744,
+ "learning_rate": 0.0001967732946933499,
+ "loss": 1.7134,
+ "step": 243
+ },
+ {
+ "epoch": 0.2440686443062111,
+ "grad_norm": 0.8007254600524902,
+ "learning_rate": 0.00019674682803563428,
+ "loss": 1.7387,
+ "step": 244
+ },
+ {
+ "epoch": 0.24506892563533494,
+ "grad_norm": 0.6257696151733398,
+ "learning_rate": 0.00019672025507065131,
+ "loss": 1.767,
+ "step": 245
+ },
+ {
+ "epoch": 0.24606920696445875,
+ "grad_norm": 0.7942785620689392,
+ "learning_rate": 0.00019669357582759983,
+ "loss": 1.8801,
+ "step": 246
+ },
+ {
+ "epoch": 0.24706948829358258,
+ "grad_norm": 0.7933829426765442,
+ "learning_rate": 0.00019666679033579552,
+ "loss": 1.9711,
+ "step": 247
+ },
+ {
+ "epoch": 0.24806976962270638,
+ "grad_norm": 0.7489326596260071,
+ "learning_rate": 0.00019663989862467082,
+ "loss": 1.8038,
+ "step": 248
+ },
+ {
+ "epoch": 0.2490700509518302,
+ "grad_norm": 0.7279101014137268,
+ "learning_rate": 0.00019661290072377482,
+ "loss": 1.66,
+ "step": 249
+ },
+ {
+ "epoch": 0.25007033228095404,
+ "grad_norm": 0.6823874115943909,
+ "learning_rate": 0.00019658579666277334,
+ "loss": 1.8064,
+ "step": 250
+ },
+ {
+ "epoch": 0.2510706136100778,
+ "grad_norm": 0.6561273336410522,
+ "learning_rate": 0.0001965585864714488,
+ "loss": 1.6874,
+ "step": 251
+ },
+ {
+ "epoch": 0.25207089493920165,
+ "grad_norm": 0.6457573175430298,
+ "learning_rate": 0.00019653127017970034,
+ "loss": 1.4587,
+ "step": 252
+ },
+ {
+ "epoch": 0.2530711762683255,
+ "grad_norm": 0.7649476528167725,
+ "learning_rate": 0.0001965038478175436,
+ "loss": 1.9811,
+ "step": 253
+ },
+ {
+ "epoch": 0.25407145759744926,
+ "grad_norm": 0.8786829710006714,
+ "learning_rate": 0.00019647631941511082,
+ "loss": 1.8629,
+ "step": 254
+ },
+ {
+ "epoch": 0.2550717389265731,
+ "grad_norm": 0.7038159966468811,
+ "learning_rate": 0.0001964486850026507,
+ "loss": 1.6885,
+ "step": 255
+ },
+ {
+ "epoch": 0.2560720202556969,
+ "grad_norm": 0.7255909442901611,
+ "learning_rate": 0.00019642094461052852,
+ "loss": 1.7335,
+ "step": 256
+ },
+ {
+ "epoch": 0.25707230158482075,
+ "grad_norm": 0.7780727744102478,
+ "learning_rate": 0.00019639309826922585,
+ "loss": 1.899,
+ "step": 257
+ },
+ {
+ "epoch": 0.2580725829139445,
+ "grad_norm": 0.8533650040626526,
+ "learning_rate": 0.0001963651460093409,
+ "loss": 1.7711,
+ "step": 258
+ },
+ {
+ "epoch": 0.25907286424306836,
+ "grad_norm": 0.6440068483352661,
+ "learning_rate": 0.00019633708786158806,
+ "loss": 1.6685,
+ "step": 259
+ },
+ {
+ "epoch": 0.2600731455721922,
+ "grad_norm": 0.6873877048492432,
+ "learning_rate": 0.00019630892385679818,
+ "loss": 1.7502,
+ "step": 260
+ },
+ {
+ "epoch": 0.261073426901316,
+ "grad_norm": 0.7100672721862793,
+ "learning_rate": 0.00019628065402591845,
+ "loss": 1.7789,
+ "step": 261
+ },
+ {
+ "epoch": 0.2620737082304398,
+ "grad_norm": 0.8447420001029968,
+ "learning_rate": 0.00019625227840001225,
+ "loss": 1.8577,
+ "step": 262
+ },
+ {
+ "epoch": 0.2630739895595636,
+ "grad_norm": 0.767888605594635,
+ "learning_rate": 0.0001962237970102593,
+ "loss": 1.5936,
+ "step": 263
+ },
+ {
+ "epoch": 0.26407427088868746,
+ "grad_norm": 0.6955805420875549,
+ "learning_rate": 0.0001961952098879555,
+ "loss": 1.7733,
+ "step": 264
+ },
+ {
+ "epoch": 0.26507455221781123,
+ "grad_norm": 0.777740478515625,
+ "learning_rate": 0.00019616651706451287,
+ "loss": 1.6027,
+ "step": 265
+ },
+ {
+ "epoch": 0.26607483354693506,
+ "grad_norm": 0.7691099047660828,
+ "learning_rate": 0.0001961377185714597,
+ "loss": 1.7457,
+ "step": 266
+ },
+ {
+ "epoch": 0.2670751148760589,
+ "grad_norm": 0.6778420805931091,
+ "learning_rate": 0.0001961088144404403,
+ "loss": 1.7704,
+ "step": 267
+ },
+ {
+ "epoch": 0.2680753962051827,
+ "grad_norm": 0.7943267226219177,
+ "learning_rate": 0.00019607980470321505,
+ "loss": 1.9775,
+ "step": 268
+ },
+ {
+ "epoch": 0.2690756775343065,
+ "grad_norm": 0.6660135388374329,
+ "learning_rate": 0.00019605068939166045,
+ "loss": 1.6556,
+ "step": 269
+ },
+ {
+ "epoch": 0.27007595886343033,
+ "grad_norm": 0.8664935827255249,
+ "learning_rate": 0.00019602146853776894,
+ "loss": 2.03,
+ "step": 270
+ },
+ {
+ "epoch": 0.27107624019255416,
+ "grad_norm": 0.7783074975013733,
+ "learning_rate": 0.000195992142173649,
+ "loss": 1.7426,
+ "step": 271
+ },
+ {
+ "epoch": 0.272076521521678,
+ "grad_norm": 0.7470223903656006,
+ "learning_rate": 0.0001959627103315249,
+ "loss": 1.7284,
+ "step": 272
+ },
+ {
+ "epoch": 0.27307680285080177,
+ "grad_norm": 0.7284931540489197,
+ "learning_rate": 0.00019593317304373705,
+ "loss": 1.6977,
+ "step": 273
+ },
+ {
+ "epoch": 0.2740770841799256,
+ "grad_norm": 0.7201762795448303,
+ "learning_rate": 0.00019590353034274144,
+ "loss": 1.7184,
+ "step": 274
+ },
+ {
+ "epoch": 0.27507736550904943,
+ "grad_norm": 0.6756151914596558,
+ "learning_rate": 0.00019587378226111014,
+ "loss": 1.7276,
+ "step": 275
+ },
+ {
+ "epoch": 0.27607764683817326,
+ "grad_norm": 0.6784201860427856,
+ "learning_rate": 0.00019584392883153088,
+ "loss": 1.642,
+ "step": 276
+ },
+ {
+ "epoch": 0.27707792816729704,
+ "grad_norm": 0.7387176752090454,
+ "learning_rate": 0.00019581397008680717,
+ "loss": 1.7911,
+ "step": 277
+ },
+ {
+ "epoch": 0.27807820949642087,
+ "grad_norm": 0.9367021918296814,
+ "learning_rate": 0.00019578390605985826,
+ "loss": 2.0034,
+ "step": 278
+ },
+ {
+ "epoch": 0.2790784908255447,
+ "grad_norm": 0.803698718547821,
+ "learning_rate": 0.00019575373678371909,
+ "loss": 1.7907,
+ "step": 279
+ },
+ {
+ "epoch": 0.2800787721546685,
+ "grad_norm": 0.7324479818344116,
+ "learning_rate": 0.00019572346229154025,
+ "loss": 1.5539,
+ "step": 280
+ },
+ {
+ "epoch": 0.2810790534837923,
+ "grad_norm": 0.7107382416725159,
+ "learning_rate": 0.00019569308261658787,
+ "loss": 1.838,
+ "step": 281
+ },
+ {
+ "epoch": 0.28207933481291614,
+ "grad_norm": 0.8698626756668091,
+ "learning_rate": 0.00019566259779224378,
+ "loss": 1.7433,
+ "step": 282
+ },
+ {
+ "epoch": 0.28307961614203997,
+ "grad_norm": 0.7804028391838074,
+ "learning_rate": 0.00019563200785200526,
+ "loss": 1.7161,
+ "step": 283
+ },
+ {
+ "epoch": 0.28407989747116374,
+ "grad_norm": 0.8762909173965454,
+ "learning_rate": 0.00019560131282948516,
+ "loss": 1.8031,
+ "step": 284
+ },
+ {
+ "epoch": 0.2850801788002876,
+ "grad_norm": 0.8252436518669128,
+ "learning_rate": 0.0001955705127584117,
+ "loss": 1.6434,
+ "step": 285
+ },
+ {
+ "epoch": 0.2860804601294114,
+ "grad_norm": 0.8220797181129456,
+ "learning_rate": 0.00019553960767262863,
+ "loss": 1.8522,
+ "step": 286
+ },
+ {
+ "epoch": 0.28708074145853524,
+ "grad_norm": 0.7883003950119019,
+ "learning_rate": 0.00019550859760609503,
+ "loss": 1.8245,
+ "step": 287
+ },
+ {
+ "epoch": 0.288081022787659,
+ "grad_norm": 0.9208703637123108,
+ "learning_rate": 0.00019547748259288536,
+ "loss": 1.8877,
+ "step": 288
+ },
+ {
+ "epoch": 0.28908130411678284,
+ "grad_norm": 0.8452202677726746,
+ "learning_rate": 0.0001954462626671894,
+ "loss": 1.554,
+ "step": 289
+ },
+ {
+ "epoch": 0.2900815854459067,
+ "grad_norm": 0.82865971326828,
+ "learning_rate": 0.0001954149378633122,
+ "loss": 1.655,
+ "step": 290
+ },
+ {
+ "epoch": 0.29108186677503045,
+ "grad_norm": 0.7871205806732178,
+ "learning_rate": 0.00019538350821567404,
+ "loss": 1.621,
+ "step": 291
+ },
+ {
+ "epoch": 0.2920821481041543,
+ "grad_norm": 0.8288848996162415,
+ "learning_rate": 0.00019535197375881045,
+ "loss": 1.9277,
+ "step": 292
+ },
+ {
+ "epoch": 0.2930824294332781,
+ "grad_norm": 0.7275516986846924,
+ "learning_rate": 0.00019532033452737205,
+ "loss": 1.7949,
+ "step": 293
+ },
+ {
+ "epoch": 0.29408271076240194,
+ "grad_norm": 0.7424570322036743,
+ "learning_rate": 0.00019528859055612468,
+ "loss": 1.6407,
+ "step": 294
+ },
+ {
+ "epoch": 0.2950829920915257,
+ "grad_norm": 0.7031363248825073,
+ "learning_rate": 0.0001952567418799492,
+ "loss": 1.8793,
+ "step": 295
+ },
+ {
+ "epoch": 0.29608327342064955,
+ "grad_norm": 0.7190185189247131,
+ "learning_rate": 0.00019522478853384155,
+ "loss": 1.6759,
+ "step": 296
+ },
+ {
+ "epoch": 0.2970835547497734,
+ "grad_norm": 0.7270736694335938,
+ "learning_rate": 0.00019519273055291266,
+ "loss": 1.6351,
+ "step": 297
+ },
+ {
+ "epoch": 0.2980838360788972,
+ "grad_norm": 0.8894152641296387,
+ "learning_rate": 0.00019516056797238846,
+ "loss": 1.7908,
+ "step": 298
+ },
+ {
+ "epoch": 0.299084117408021,
+ "grad_norm": 0.9089106321334839,
+ "learning_rate": 0.00019512830082760987,
+ "loss": 1.6018,
+ "step": 299
+ },
+ {
+ "epoch": 0.3000843987371448,
+ "grad_norm": 0.8772429823875427,
+ "learning_rate": 0.00019509592915403255,
+ "loss": 1.8474,
+ "step": 300
+ },
+ {
+ "epoch": 0.30108468006626865,
+ "grad_norm": 0.8244933485984802,
+ "learning_rate": 0.00019506345298722717,
+ "loss": 1.4324,
+ "step": 301
+ },
+ {
+ "epoch": 0.3020849613953925,
+ "grad_norm": 0.7283012866973877,
+ "learning_rate": 0.00019503087236287913,
+ "loss": 1.5115,
+ "step": 302
+ },
+ {
+ "epoch": 0.30308524272451626,
+ "grad_norm": 0.7721333503723145,
+ "learning_rate": 0.00019499818731678873,
+ "loss": 1.6728,
+ "step": 303
+ },
+ {
+ "epoch": 0.3040855240536401,
+ "grad_norm": 0.7579306960105896,
+ "learning_rate": 0.00019496539788487082,
+ "loss": 1.5927,
+ "step": 304
+ },
+ {
+ "epoch": 0.3050858053827639,
+ "grad_norm": 0.9054704308509827,
+ "learning_rate": 0.0001949325041031551,
+ "loss": 1.9027,
+ "step": 305
+ },
+ {
+ "epoch": 0.3060860867118877,
+ "grad_norm": 0.7023262977600098,
+ "learning_rate": 0.0001948995060077859,
+ "loss": 1.7705,
+ "step": 306
+ },
+ {
+ "epoch": 0.3070863680410115,
+ "grad_norm": 0.7942065000534058,
+ "learning_rate": 0.0001948664036350221,
+ "loss": 1.8269,
+ "step": 307
+ },
+ {
+ "epoch": 0.30808664937013536,
+ "grad_norm": 0.9305068850517273,
+ "learning_rate": 0.00019483319702123732,
+ "loss": 1.8247,
+ "step": 308
+ },
+ {
+ "epoch": 0.3090869306992592,
+ "grad_norm": 0.814664900302887,
+ "learning_rate": 0.00019479988620291956,
+ "loss": 1.9179,
+ "step": 309
+ },
+ {
+ "epoch": 0.31008721202838296,
+ "grad_norm": 0.6418014764785767,
+ "learning_rate": 0.00019476647121667137,
+ "loss": 1.5011,
+ "step": 310
+ },
+ {
+ "epoch": 0.3110874933575068,
+ "grad_norm": 0.7911447882652283,
+ "learning_rate": 0.00019473295209920983,
+ "loss": 1.857,
+ "step": 311
+ },
+ {
+ "epoch": 0.3120877746866306,
+ "grad_norm": 0.7792949676513672,
+ "learning_rate": 0.00019469932888736632,
+ "loss": 1.7279,
+ "step": 312
+ },
+ {
+ "epoch": 0.31308805601575446,
+ "grad_norm": 0.7579171657562256,
+ "learning_rate": 0.00019466560161808674,
+ "loss": 1.6902,
+ "step": 313
+ },
+ {
+ "epoch": 0.31408833734487823,
+ "grad_norm": 0.7052372694015503,
+ "learning_rate": 0.00019463177032843124,
+ "loss": 1.7302,
+ "step": 314
+ },
+ {
+ "epoch": 0.31508861867400206,
+ "grad_norm": 0.7188624143600464,
+ "learning_rate": 0.00019459783505557424,
+ "loss": 1.7338,
+ "step": 315
+ },
+ {
+ "epoch": 0.3160889000031259,
+ "grad_norm": 0.6057978272438049,
+ "learning_rate": 0.00019456379583680452,
+ "loss": 1.6123,
+ "step": 316
+ },
+ {
+ "epoch": 0.31708918133224967,
+ "grad_norm": 0.8339365720748901,
+ "learning_rate": 0.000194529652709525,
+ "loss": 1.9765,
+ "step": 317
+ },
+ {
+ "epoch": 0.3180894626613735,
+ "grad_norm": 0.8524260520935059,
+ "learning_rate": 0.00019449540571125286,
+ "loss": 1.6803,
+ "step": 318
+ },
+ {
+ "epoch": 0.31908974399049733,
+ "grad_norm": 0.7035975456237793,
+ "learning_rate": 0.00019446105487961926,
+ "loss": 1.5792,
+ "step": 319
+ },
+ {
+ "epoch": 0.32009002531962116,
+ "grad_norm": 0.7894249558448792,
+ "learning_rate": 0.0001944266002523696,
+ "loss": 1.6326,
+ "step": 320
+ },
+ {
+ "epoch": 0.32109030664874494,
+ "grad_norm": 0.7716989517211914,
+ "learning_rate": 0.0001943920418673633,
+ "loss": 1.6871,
+ "step": 321
+ },
+ {
+ "epoch": 0.32209058797786877,
+ "grad_norm": 0.7914933562278748,
+ "learning_rate": 0.00019435737976257377,
+ "loss": 1.7148,
+ "step": 322
+ },
+ {
+ "epoch": 0.3230908693069926,
+ "grad_norm": 0.7113205790519714,
+ "learning_rate": 0.00019432261397608834,
+ "loss": 1.5236,
+ "step": 323
+ },
+ {
+ "epoch": 0.32409115063611643,
+ "grad_norm": 0.8609917163848877,
+ "learning_rate": 0.00019428774454610843,
+ "loss": 1.8101,
+ "step": 324
+ },
+ {
+ "epoch": 0.3250914319652402,
+ "grad_norm": 0.7319685220718384,
+ "learning_rate": 0.00019425277151094913,
+ "loss": 1.7712,
+ "step": 325
+ },
+ {
+ "epoch": 0.32609171329436404,
+ "grad_norm": 0.6478747725486755,
+ "learning_rate": 0.00019421769490903957,
+ "loss": 1.8535,
+ "step": 326
+ },
+ {
+ "epoch": 0.32709199462348787,
+ "grad_norm": 0.7025763392448425,
+ "learning_rate": 0.0001941825147789225,
+ "loss": 1.9213,
+ "step": 327
+ },
+ {
+ "epoch": 0.32809227595261165,
+ "grad_norm": 0.7595239877700806,
+ "learning_rate": 0.00019414723115925456,
+ "loss": 1.7449,
+ "step": 328
+ },
+ {
+ "epoch": 0.3290925572817355,
+ "grad_norm": 0.7728105783462524,
+ "learning_rate": 0.0001941118440888061,
+ "loss": 1.8821,
+ "step": 329
+ },
+ {
+ "epoch": 0.3300928386108593,
+ "grad_norm": 0.7430977821350098,
+ "learning_rate": 0.0001940763536064611,
+ "loss": 1.6904,
+ "step": 330
+ },
+ {
+ "epoch": 0.33109311993998314,
+ "grad_norm": 0.7909367680549622,
+ "learning_rate": 0.00019404075975121716,
+ "loss": 1.7899,
+ "step": 331
+ },
+ {
+ "epoch": 0.3320934012691069,
+ "grad_norm": 0.7561226487159729,
+ "learning_rate": 0.0001940050625621855,
+ "loss": 1.7746,
+ "step": 332
+ },
+ {
+ "epoch": 0.33309368259823074,
+ "grad_norm": 0.7602452635765076,
+ "learning_rate": 0.00019396926207859084,
+ "loss": 1.7909,
+ "step": 333
+ },
+ {
+ "epoch": 0.3340939639273546,
+ "grad_norm": 0.8194379806518555,
+ "learning_rate": 0.0001939333583397715,
+ "loss": 1.7039,
+ "step": 334
+ },
+ {
+ "epoch": 0.3350942452564784,
+ "grad_norm": 0.7036342024803162,
+ "learning_rate": 0.00019389735138517915,
+ "loss": 1.6663,
+ "step": 335
+ },
+ {
+ "epoch": 0.3360945265856022,
+ "grad_norm": 0.8429521918296814,
+ "learning_rate": 0.00019386124125437895,
+ "loss": 1.589,
+ "step": 336
+ },
+ {
+ "epoch": 0.337094807914726,
+ "grad_norm": 0.7271071076393127,
+ "learning_rate": 0.00019382502798704935,
+ "loss": 1.646,
+ "step": 337
+ },
+ {
+ "epoch": 0.33809508924384984,
+ "grad_norm": 0.7862086892127991,
+ "learning_rate": 0.00019378871162298227,
+ "loss": 1.6085,
+ "step": 338
+ },
+ {
+ "epoch": 0.3390953705729737,
+ "grad_norm": 0.676815390586853,
+ "learning_rate": 0.00019375229220208276,
+ "loss": 1.7335,
+ "step": 339
+ },
+ {
+ "epoch": 0.34009565190209745,
+ "grad_norm": 0.8916042447090149,
+ "learning_rate": 0.00019371576976436917,
+ "loss": 1.7914,
+ "step": 340
+ },
+ {
+ "epoch": 0.3410959332312213,
+ "grad_norm": 0.7913751006126404,
+ "learning_rate": 0.00019367914434997312,
+ "loss": 1.6031,
+ "step": 341
+ },
+ {
+ "epoch": 0.3420962145603451,
+ "grad_norm": 0.7409866452217102,
+ "learning_rate": 0.00019364241599913924,
+ "loss": 1.6525,
+ "step": 342
+ },
+ {
+ "epoch": 0.3430964958894689,
+ "grad_norm": 0.7472705841064453,
+ "learning_rate": 0.0001936055847522254,
+ "loss": 1.6716,
+ "step": 343
+ },
+ {
+ "epoch": 0.3440967772185927,
+ "grad_norm": 0.7030773758888245,
+ "learning_rate": 0.00019356865064970244,
+ "loss": 1.7134,
+ "step": 344
+ },
+ {
+ "epoch": 0.34509705854771655,
+ "grad_norm": 0.6609564423561096,
+ "learning_rate": 0.0001935316137321543,
+ "loss": 1.7127,
+ "step": 345
+ },
+ {
+ "epoch": 0.3460973398768404,
+ "grad_norm": 0.7811393141746521,
+ "learning_rate": 0.00019349447404027782,
+ "loss": 1.75,
+ "step": 346
+ },
+ {
+ "epoch": 0.34709762120596416,
+ "grad_norm": 0.6980521082878113,
+ "learning_rate": 0.00019345723161488283,
+ "loss": 1.82,
+ "step": 347
+ },
+ {
+ "epoch": 0.348097902535088,
+ "grad_norm": 0.749796986579895,
+ "learning_rate": 0.000193419886496892,
+ "loss": 1.9755,
+ "step": 348
+ },
+ {
+ "epoch": 0.3490981838642118,
+ "grad_norm": 0.9486667513847351,
+ "learning_rate": 0.00019338243872734086,
+ "loss": 1.7047,
+ "step": 349
+ },
+ {
+ "epoch": 0.35009846519333565,
+ "grad_norm": 0.8086081147193909,
+ "learning_rate": 0.00019334488834737775,
+ "loss": 1.661,
+ "step": 350
+ },
+ {
+ "epoch": 0.3510987465224594,
+ "grad_norm": 0.700549840927124,
+ "learning_rate": 0.00019330723539826375,
+ "loss": 1.8696,
+ "step": 351
+ },
+ {
+ "epoch": 0.35209902785158326,
+ "grad_norm": 0.7465476393699646,
+ "learning_rate": 0.00019326947992137262,
+ "loss": 1.5444,
+ "step": 352
+ },
+ {
+ "epoch": 0.3530993091807071,
+ "grad_norm": 0.7370999455451965,
+ "learning_rate": 0.00019323162195819082,
+ "loss": 1.8805,
+ "step": 353
+ },
+ {
+ "epoch": 0.35409959050983086,
+ "grad_norm": 0.719359278678894,
+ "learning_rate": 0.0001931936615503174,
+ "loss": 1.8022,
+ "step": 354
+ },
+ {
+ "epoch": 0.3550998718389547,
+ "grad_norm": 0.7301434278488159,
+ "learning_rate": 0.000193155598739464,
+ "loss": 1.6984,
+ "step": 355
+ },
+ {
+ "epoch": 0.3561001531680785,
+ "grad_norm": 0.7191399335861206,
+ "learning_rate": 0.0001931174335674547,
+ "loss": 1.7229,
+ "step": 356
+ },
+ {
+ "epoch": 0.35710043449720236,
+ "grad_norm": 0.7471932768821716,
+ "learning_rate": 0.0001930791660762262,
+ "loss": 1.7408,
+ "step": 357
+ },
+ {
+ "epoch": 0.35810071582632613,
+ "grad_norm": 0.8197934031486511,
+ "learning_rate": 0.00019304079630782752,
+ "loss": 1.6938,
+ "step": 358
+ },
+ {
+ "epoch": 0.35910099715544996,
+ "grad_norm": 0.7408166527748108,
+ "learning_rate": 0.0001930023243044201,
+ "loss": 1.7798,
+ "step": 359
+ },
+ {
+ "epoch": 0.3601012784845738,
+ "grad_norm": 0.7525373101234436,
+ "learning_rate": 0.00019296375010827773,
+ "loss": 1.711,
+ "step": 360
+ },
+ {
+ "epoch": 0.3611015598136976,
+ "grad_norm": 0.6712046265602112,
+ "learning_rate": 0.00019292507376178643,
+ "loss": 1.8157,
+ "step": 361
+ },
+ {
+ "epoch": 0.3621018411428214,
+ "grad_norm": 0.6712916493415833,
+ "learning_rate": 0.00019288629530744454,
+ "loss": 1.8707,
+ "step": 362
+ },
+ {
+ "epoch": 0.36310212247194523,
+ "grad_norm": 0.6127772331237793,
+ "learning_rate": 0.0001928474147878626,
+ "loss": 1.4743,
+ "step": 363
+ },
+ {
+ "epoch": 0.36410240380106906,
+ "grad_norm": 0.910310685634613,
+ "learning_rate": 0.0001928084322457632,
+ "loss": 1.7956,
+ "step": 364
+ },
+ {
+ "epoch": 0.3651026851301929,
+ "grad_norm": 0.6267688870429993,
+ "learning_rate": 0.00019276934772398114,
+ "loss": 1.4664,
+ "step": 365
+ },
+ {
+ "epoch": 0.36610296645931667,
+ "grad_norm": 0.8317943811416626,
+ "learning_rate": 0.00019273016126546323,
+ "loss": 1.853,
+ "step": 366
+ },
+ {
+ "epoch": 0.3671032477884405,
+ "grad_norm": 0.7581344842910767,
+ "learning_rate": 0.00019269087291326833,
+ "loss": 1.9236,
+ "step": 367
+ },
+ {
+ "epoch": 0.36810352911756433,
+ "grad_norm": 0.9311390519142151,
+ "learning_rate": 0.00019265148271056722,
+ "loss": 1.7019,
+ "step": 368
+ },
+ {
+ "epoch": 0.3691038104466881,
+ "grad_norm": 0.9513958096504211,
+ "learning_rate": 0.0001926119907006426,
+ "loss": 1.7617,
+ "step": 369
+ },
+ {
+ "epoch": 0.37010409177581194,
+ "grad_norm": 0.7407613396644592,
+ "learning_rate": 0.00019257239692688907,
+ "loss": 2.1057,
+ "step": 370
+ },
+ {
+ "epoch": 0.37110437310493577,
+ "grad_norm": 0.7530227899551392,
+ "learning_rate": 0.00019253270143281296,
+ "loss": 1.9844,
+ "step": 371
+ },
+ {
+ "epoch": 0.3721046544340596,
+ "grad_norm": 0.6733037233352661,
+ "learning_rate": 0.00019249290426203252,
+ "loss": 1.8939,
+ "step": 372
+ },
+ {
+ "epoch": 0.3731049357631834,
+ "grad_norm": 0.7037007808685303,
+ "learning_rate": 0.0001924530054582776,
+ "loss": 1.6897,
+ "step": 373
+ },
+ {
+ "epoch": 0.3741052170923072,
+ "grad_norm": 0.7689145803451538,
+ "learning_rate": 0.0001924130050653898,
+ "loss": 1.6544,
+ "step": 374
+ },
+ {
+ "epoch": 0.37510549842143104,
+ "grad_norm": 0.7100968360900879,
+ "learning_rate": 0.00019237290312732226,
+ "loss": 1.7932,
+ "step": 375
+ },
+ {
+ "epoch": 0.37610577975055487,
+ "grad_norm": 0.7645193338394165,
+ "learning_rate": 0.00019233269968813984,
+ "loss": 1.6576,
+ "step": 376
+ },
+ {
+ "epoch": 0.37710606107967864,
+ "grad_norm": 0.6831678152084351,
+ "learning_rate": 0.00019229239479201876,
+ "loss": 1.6675,
+ "step": 377
+ },
+ {
+ "epoch": 0.3781063424088025,
+ "grad_norm": 0.8677794337272644,
+ "learning_rate": 0.0001922519884832469,
+ "loss": 1.6064,
+ "step": 378
+ },
+ {
+ "epoch": 0.3791066237379263,
+ "grad_norm": 0.6727691888809204,
+ "learning_rate": 0.0001922114808062234,
+ "loss": 1.6249,
+ "step": 379
+ },
+ {
+ "epoch": 0.3801069050670501,
+ "grad_norm": 0.6588670611381531,
+ "learning_rate": 0.00019217087180545893,
+ "loss": 1.5541,
+ "step": 380
+ },
+ {
+ "epoch": 0.3811071863961739,
+ "grad_norm": 0.7666369080543518,
+ "learning_rate": 0.0001921301615255754,
+ "loss": 1.7806,
+ "step": 381
+ },
+ {
+ "epoch": 0.38210746772529774,
+ "grad_norm": 0.6465156078338623,
+ "learning_rate": 0.0001920893500113061,
+ "loss": 1.512,
+ "step": 382
+ },
+ {
+ "epoch": 0.3831077490544216,
+ "grad_norm": 0.7854346632957458,
+ "learning_rate": 0.00019204843730749547,
+ "loss": 1.6857,
+ "step": 383
+ },
+ {
+ "epoch": 0.38410803038354535,
+ "grad_norm": 0.6625111103057861,
+ "learning_rate": 0.00019200742345909915,
+ "loss": 1.7033,
+ "step": 384
+ },
+ {
+ "epoch": 0.3851083117126692,
+ "grad_norm": 0.7273709177970886,
+ "learning_rate": 0.00019196630851118398,
+ "loss": 1.665,
+ "step": 385
+ },
+ {
+ "epoch": 0.386108593041793,
+ "grad_norm": 0.6861465573310852,
+ "learning_rate": 0.0001919250925089278,
+ "loss": 1.5028,
+ "step": 386
+ },
+ {
+ "epoch": 0.38710887437091684,
+ "grad_norm": 0.845456063747406,
+ "learning_rate": 0.00019188377549761963,
+ "loss": 1.967,
+ "step": 387
+ },
+ {
+ "epoch": 0.3881091557000406,
+ "grad_norm": 0.6481165289878845,
+ "learning_rate": 0.00019184235752265928,
+ "loss": 1.6053,
+ "step": 388
+ },
+ {
+ "epoch": 0.38910943702916445,
+ "grad_norm": 0.6312947273254395,
+ "learning_rate": 0.00019180083862955772,
+ "loss": 1.4427,
+ "step": 389
+ },
+ {
+ "epoch": 0.3901097183582883,
+ "grad_norm": 0.7874154448509216,
+ "learning_rate": 0.00019175921886393666,
+ "loss": 1.6099,
+ "step": 390
+ },
+ {
+ "epoch": 0.39110999968741206,
+ "grad_norm": 0.6839481592178345,
+ "learning_rate": 0.00019171749827152869,
+ "loss": 1.7004,
+ "step": 391
+ },
+ {
+ "epoch": 0.3921102810165359,
+ "grad_norm": 0.7239277362823486,
+ "learning_rate": 0.0001916756768981772,
+ "loss": 1.8813,
+ "step": 392
+ },
+ {
+ "epoch": 0.3931105623456597,
+ "grad_norm": 0.8241100311279297,
+ "learning_rate": 0.00019163375478983632,
+ "loss": 1.9443,
+ "step": 393
+ },
+ {
+ "epoch": 0.39411084367478355,
+ "grad_norm": 0.7401999235153198,
+ "learning_rate": 0.00019159173199257085,
+ "loss": 1.6663,
+ "step": 394
+ },
+ {
+ "epoch": 0.3951111250039073,
+ "grad_norm": 0.8297036290168762,
+ "learning_rate": 0.00019154960855255628,
+ "loss": 1.8012,
+ "step": 395
+ },
+ {
+ "epoch": 0.39611140633303116,
+ "grad_norm": 0.9661216735839844,
+ "learning_rate": 0.0001915073845160786,
+ "loss": 1.7007,
+ "step": 396
+ },
+ {
+ "epoch": 0.397111687662155,
+ "grad_norm": 1.4041926860809326,
+ "learning_rate": 0.00019146505992953446,
+ "loss": 1.7232,
+ "step": 397
+ },
+ {
+ "epoch": 0.3981119689912788,
+ "grad_norm": 0.8469036221504211,
+ "learning_rate": 0.00019142263483943085,
+ "loss": 1.4479,
+ "step": 398
+ },
+ {
+ "epoch": 0.3991122503204026,
+ "grad_norm": 0.9476561546325684,
+ "learning_rate": 0.00019138010929238534,
+ "loss": 1.8572,
+ "step": 399
+ },
+ {
+ "epoch": 0.4001125316495264,
+ "grad_norm": 0.7196705937385559,
+ "learning_rate": 0.00019133748333512575,
+ "loss": 1.6184,
+ "step": 400
+ },
+ {
+ "epoch": 0.40111281297865026,
+ "grad_norm": 0.8957480192184448,
+ "learning_rate": 0.00019129475701449035,
+ "loss": 1.762,
+ "step": 401
+ },
+ {
+ "epoch": 0.4021130943077741,
+ "grad_norm": 0.7850635647773743,
+ "learning_rate": 0.0001912519303774276,
+ "loss": 1.6764,
+ "step": 402
+ },
+ {
+ "epoch": 0.40311337563689786,
+ "grad_norm": 0.7579814791679382,
+ "learning_rate": 0.0001912090034709963,
+ "loss": 1.6231,
+ "step": 403
+ },
+ {
+ "epoch": 0.4041136569660217,
+ "grad_norm": 0.7173107266426086,
+ "learning_rate": 0.00019116597634236525,
+ "loss": 1.7107,
+ "step": 404
+ },
+ {
+ "epoch": 0.4051139382951455,
+ "grad_norm": 0.7832950353622437,
+ "learning_rate": 0.0001911228490388136,
+ "loss": 1.8608,
+ "step": 405
+ },
+ {
+ "epoch": 0.4061142196242693,
+ "grad_norm": 0.716299295425415,
+ "learning_rate": 0.00019107962160773035,
+ "loss": 1.652,
+ "step": 406
+ },
+ {
+ "epoch": 0.40711450095339313,
+ "grad_norm": 0.6675253510475159,
+ "learning_rate": 0.0001910362940966147,
+ "loss": 1.5963,
+ "step": 407
+ },
+ {
+ "epoch": 0.40811478228251696,
+ "grad_norm": 0.6555336713790894,
+ "learning_rate": 0.00019099286655307568,
+ "loss": 1.4991,
+ "step": 408
+ },
+ {
+ "epoch": 0.4091150636116408,
+ "grad_norm": 0.7307867407798767,
+ "learning_rate": 0.0001909493390248324,
+ "loss": 1.8221,
+ "step": 409
+ },
+ {
+ "epoch": 0.41011534494076457,
+ "grad_norm": 0.6557430624961853,
+ "learning_rate": 0.00019090571155971366,
+ "loss": 1.6484,
+ "step": 410
+ },
+ {
+ "epoch": 0.4111156262698884,
+ "grad_norm": 0.6816605925559998,
+ "learning_rate": 0.00019086198420565823,
+ "loss": 1.5052,
+ "step": 411
+ },
+ {
+ "epoch": 0.41211590759901223,
+ "grad_norm": 0.66513592004776,
+ "learning_rate": 0.00019081815701071445,
+ "loss": 1.8818,
+ "step": 412
+ },
+ {
+ "epoch": 0.41311618892813606,
+ "grad_norm": 0.6807469129562378,
+ "learning_rate": 0.0001907742300230406,
+ "loss": 1.5997,
+ "step": 413
+ },
+ {
+ "epoch": 0.41411647025725984,
+ "grad_norm": 0.8060654401779175,
+ "learning_rate": 0.00019073020329090444,
+ "loss": 1.8099,
+ "step": 414
+ },
+ {
+ "epoch": 0.41511675158638367,
+ "grad_norm": 0.7440110445022583,
+ "learning_rate": 0.0001906860768626834,
+ "loss": 1.4876,
+ "step": 415
+ },
+ {
+ "epoch": 0.4161170329155075,
+ "grad_norm": 0.7675415277481079,
+ "learning_rate": 0.00019064185078686443,
+ "loss": 1.4722,
+ "step": 416
+ },
+ {
+ "epoch": 0.4171173142446313,
+ "grad_norm": 0.6656553149223328,
+ "learning_rate": 0.000190597525112044,
+ "loss": 1.6453,
+ "step": 417
+ },
+ {
+ "epoch": 0.4181175955737551,
+ "grad_norm": 0.730689287185669,
+ "learning_rate": 0.000190553099886928,
+ "loss": 1.6584,
+ "step": 418
+ },
+ {
+ "epoch": 0.41911787690287894,
+ "grad_norm": 0.8425858616828918,
+ "learning_rate": 0.00019050857516033173,
+ "loss": 1.6249,
+ "step": 419
+ },
+ {
+ "epoch": 0.42011815823200277,
+ "grad_norm": 0.7816892266273499,
+ "learning_rate": 0.00019046395098117983,
+ "loss": 1.7532,
+ "step": 420
+ },
+ {
+ "epoch": 0.42111843956112655,
+ "grad_norm": 0.7324026823043823,
+ "learning_rate": 0.00019041922739850616,
+ "loss": 1.8523,
+ "step": 421
+ },
+ {
+ "epoch": 0.4221187208902504,
+ "grad_norm": 0.7473389506340027,
+ "learning_rate": 0.00019037440446145385,
+ "loss": 1.583,
+ "step": 422
+ },
+ {
+ "epoch": 0.4231190022193742,
+ "grad_norm": 0.8720895051956177,
+ "learning_rate": 0.00019032948221927524,
+ "loss": 1.6806,
+ "step": 423
+ },
+ {
+ "epoch": 0.42411928354849804,
+ "grad_norm": 0.728528618812561,
+ "learning_rate": 0.00019028446072133175,
+ "loss": 1.7283,
+ "step": 424
+ },
+ {
+ "epoch": 0.4251195648776218,
+ "grad_norm": 0.739930272102356,
+ "learning_rate": 0.00019023934001709383,
+ "loss": 1.7244,
+ "step": 425
+ },
+ {
+ "epoch": 0.42611984620674564,
+ "grad_norm": 0.7825399041175842,
+ "learning_rate": 0.00019019412015614098,
+ "loss": 1.7871,
+ "step": 426
+ },
+ {
+ "epoch": 0.4271201275358695,
+ "grad_norm": 0.8878734707832336,
+ "learning_rate": 0.00019014880118816164,
+ "loss": 1.6018,
+ "step": 427
+ },
+ {
+ "epoch": 0.4281204088649933,
+ "grad_norm": 0.726259708404541,
+ "learning_rate": 0.0001901033831629532,
+ "loss": 1.7732,
+ "step": 428
+ },
+ {
+ "epoch": 0.4291206901941171,
+ "grad_norm": 0.7620319724082947,
+ "learning_rate": 0.00019005786613042185,
+ "loss": 1.6466,
+ "step": 429
+ },
+ {
+ "epoch": 0.4301209715232409,
+ "grad_norm": 0.7295501828193665,
+ "learning_rate": 0.00019001225014058255,
+ "loss": 1.8708,
+ "step": 430
+ },
+ {
+ "epoch": 0.43112125285236474,
+ "grad_norm": 0.7419458031654358,
+ "learning_rate": 0.00018996653524355902,
+ "loss": 1.6583,
+ "step": 431
+ },
+ {
+ "epoch": 0.4321215341814885,
+ "grad_norm": 0.7701705098152161,
+ "learning_rate": 0.00018992072148958368,
+ "loss": 1.4421,
+ "step": 432
+ },
+ {
+ "epoch": 0.43312181551061235,
+ "grad_norm": 0.8237659931182861,
+ "learning_rate": 0.00018987480892899758,
+ "loss": 1.844,
+ "step": 433
+ },
+ {
+ "epoch": 0.4341220968397362,
+ "grad_norm": 0.6167672276496887,
+ "learning_rate": 0.00018982879761225027,
+ "loss": 1.6193,
+ "step": 434
+ },
+ {
+ "epoch": 0.43512237816886,
+ "grad_norm": 0.7565534710884094,
+ "learning_rate": 0.00018978268758989991,
+ "loss": 1.7655,
+ "step": 435
+ },
+ {
+ "epoch": 0.4361226594979838,
+ "grad_norm": 0.8333333730697632,
+ "learning_rate": 0.00018973647891261307,
+ "loss": 1.5764,
+ "step": 436
+ },
+ {
+ "epoch": 0.4371229408271076,
+ "grad_norm": 0.7404434084892273,
+ "learning_rate": 0.00018969017163116472,
+ "loss": 1.7922,
+ "step": 437
+ },
+ {
+ "epoch": 0.43812322215623145,
+ "grad_norm": 0.7129400372505188,
+ "learning_rate": 0.0001896437657964382,
+ "loss": 1.6925,
+ "step": 438
+ },
+ {
+ "epoch": 0.4391235034853553,
+ "grad_norm": 0.7750307321548462,
+ "learning_rate": 0.00018959726145942508,
+ "loss": 1.8133,
+ "step": 439
+ },
+ {
+ "epoch": 0.44012378481447906,
+ "grad_norm": 0.7244920134544373,
+ "learning_rate": 0.00018955065867122528,
+ "loss": 1.6425,
+ "step": 440
+ },
+ {
+ "epoch": 0.4411240661436029,
+ "grad_norm": 0.7624644637107849,
+ "learning_rate": 0.00018950395748304678,
+ "loss": 1.886,
+ "step": 441
+ },
+ {
+ "epoch": 0.4421243474727267,
+ "grad_norm": 0.7016286849975586,
+ "learning_rate": 0.0001894571579462058,
+ "loss": 1.7308,
+ "step": 442
+ },
+ {
+ "epoch": 0.4431246288018505,
+ "grad_norm": 0.6965353488922119,
+ "learning_rate": 0.00018941026011212654,
+ "loss": 1.5872,
+ "step": 443
+ },
+ {
+ "epoch": 0.4441249101309743,
+ "grad_norm": 0.7479360699653625,
+ "learning_rate": 0.00018936326403234125,
+ "loss": 1.8129,
+ "step": 444
+ },
+ {
+ "epoch": 0.44512519146009816,
+ "grad_norm": 0.7027686834335327,
+ "learning_rate": 0.00018931616975849006,
+ "loss": 1.6433,
+ "step": 445
+ },
+ {
+ "epoch": 0.446125472789222,
+ "grad_norm": 0.7771592140197754,
+ "learning_rate": 0.00018926897734232115,
+ "loss": 1.4645,
+ "step": 446
+ },
+ {
+ "epoch": 0.44712575411834576,
+ "grad_norm": 0.7766458988189697,
+ "learning_rate": 0.0001892216868356904,
+ "loss": 1.7873,
+ "step": 447
+ },
+ {
+ "epoch": 0.4481260354474696,
+ "grad_norm": 0.8146182298660278,
+ "learning_rate": 0.0001891742982905615,
+ "loss": 1.7935,
+ "step": 448
+ },
+ {
+ "epoch": 0.4491263167765934,
+ "grad_norm": 0.6744781136512756,
+ "learning_rate": 0.00018912681175900598,
+ "loss": 1.8916,
+ "step": 449
+ },
+ {
+ "epoch": 0.45012659810571726,
+ "grad_norm": 0.6259024143218994,
+ "learning_rate": 0.00018907922729320285,
+ "loss": 1.6616,
+ "step": 450
+ },
+ {
+ "epoch": 0.45112687943484103,
+ "grad_norm": 0.7717494368553162,
+ "learning_rate": 0.00018903154494543889,
+ "loss": 1.817,
+ "step": 451
+ },
+ {
+ "epoch": 0.45212716076396486,
+ "grad_norm": 0.648040771484375,
+ "learning_rate": 0.00018898376476810834,
+ "loss": 1.6309,
+ "step": 452
+ },
+ {
+ "epoch": 0.4531274420930887,
+ "grad_norm": 0.7560017704963684,
+ "learning_rate": 0.00018893588681371303,
+ "loss": 1.8016,
+ "step": 453
+ },
+ {
+ "epoch": 0.45412772342221247,
+ "grad_norm": 0.8778628706932068,
+ "learning_rate": 0.00018888791113486213,
+ "loss": 1.7797,
+ "step": 454
+ },
+ {
+ "epoch": 0.4551280047513363,
+ "grad_norm": 0.6795655488967896,
+ "learning_rate": 0.00018883983778427227,
+ "loss": 1.6343,
+ "step": 455
+ },
+ {
+ "epoch": 0.45612828608046013,
+ "grad_norm": 0.6690213084220886,
+ "learning_rate": 0.0001887916668147673,
+ "loss": 1.6224,
+ "step": 456
+ },
+ {
+ "epoch": 0.45712856740958396,
+ "grad_norm": 0.7529327869415283,
+ "learning_rate": 0.00018874339827927846,
+ "loss": 1.6396,
+ "step": 457
+ },
+ {
+ "epoch": 0.45812884873870774,
+ "grad_norm": 0.8393098711967468,
+ "learning_rate": 0.00018869503223084414,
+ "loss": 1.8374,
+ "step": 458
+ },
+ {
+ "epoch": 0.45912913006783157,
+ "grad_norm": 0.7435776591300964,
+ "learning_rate": 0.00018864656872260985,
+ "loss": 1.7363,
+ "step": 459
+ },
+ {
+ "epoch": 0.4601294113969554,
+ "grad_norm": 0.6737055778503418,
+ "learning_rate": 0.00018859800780782828,
+ "loss": 1.6661,
+ "step": 460
+ },
+ {
+ "epoch": 0.46112969272607923,
+ "grad_norm": 0.7674340605735779,
+ "learning_rate": 0.000188549349539859,
+ "loss": 1.6269,
+ "step": 461
+ },
+ {
+ "epoch": 0.462129974055203,
+ "grad_norm": 0.7329950928688049,
+ "learning_rate": 0.00018850059397216876,
+ "loss": 1.6989,
+ "step": 462
+ },
+ {
+ "epoch": 0.46313025538432684,
+ "grad_norm": 0.7075778841972351,
+ "learning_rate": 0.00018845174115833099,
+ "loss": 1.7286,
+ "step": 463
+ },
+ {
+ "epoch": 0.46413053671345067,
+ "grad_norm": 0.7973611354827881,
+ "learning_rate": 0.0001884027911520262,
+ "loss": 1.7478,
+ "step": 464
+ },
+ {
+ "epoch": 0.4651308180425745,
+ "grad_norm": 0.7790057063102722,
+ "learning_rate": 0.00018835374400704154,
+ "loss": 1.6659,
+ "step": 465
+ },
+ {
+ "epoch": 0.4661310993716983,
+ "grad_norm": 0.8505310416221619,
+ "learning_rate": 0.00018830459977727096,
+ "loss": 1.6879,
+ "step": 466
+ },
+ {
+ "epoch": 0.4671313807008221,
+ "grad_norm": 0.7616267800331116,
+ "learning_rate": 0.0001882553585167151,
+ "loss": 1.6525,
+ "step": 467
+ },
+ {
+ "epoch": 0.46813166202994594,
+ "grad_norm": 0.8038133978843689,
+ "learning_rate": 0.00018820602027948114,
+ "loss": 1.7929,
+ "step": 468
+ },
+ {
+ "epoch": 0.4691319433590697,
+ "grad_norm": 0.6762365698814392,
+ "learning_rate": 0.00018815658511978298,
+ "loss": 1.6926,
+ "step": 469
+ },
+ {
+ "epoch": 0.47013222468819355,
+ "grad_norm": 0.6515015959739685,
+ "learning_rate": 0.00018810705309194083,
+ "loss": 1.7817,
+ "step": 470
+ },
+ {
+ "epoch": 0.4711325060173174,
+ "grad_norm": 0.696675181388855,
+ "learning_rate": 0.00018805742425038145,
+ "loss": 1.7195,
+ "step": 471
+ },
+ {
+ "epoch": 0.4721327873464412,
+ "grad_norm": 0.7929533123970032,
+ "learning_rate": 0.00018800769864963802,
+ "loss": 2.0165,
+ "step": 472
+ },
+ {
+ "epoch": 0.473133068675565,
+ "grad_norm": 0.7223886251449585,
+ "learning_rate": 0.00018795787634434994,
+ "loss": 1.6708,
+ "step": 473
+ },
+ {
+ "epoch": 0.4741333500046888,
+ "grad_norm": 0.7982028126716614,
+ "learning_rate": 0.0001879079573892629,
+ "loss": 1.628,
+ "step": 474
+ },
+ {
+ "epoch": 0.47513363133381264,
+ "grad_norm": 0.6962152123451233,
+ "learning_rate": 0.00018785794183922883,
+ "loss": 1.6429,
+ "step": 475
+ },
+ {
+ "epoch": 0.4761339126629365,
+ "grad_norm": 0.687489926815033,
+ "learning_rate": 0.00018780782974920572,
+ "loss": 1.4546,
+ "step": 476
+ },
+ {
+ "epoch": 0.47713419399206025,
+ "grad_norm": 0.7260375022888184,
+ "learning_rate": 0.00018775762117425777,
+ "loss": 1.739,
+ "step": 477
+ },
+ {
+ "epoch": 0.4781344753211841,
+ "grad_norm": 0.759400486946106,
+ "learning_rate": 0.0001877073161695551,
+ "loss": 1.6465,
+ "step": 478
+ },
+ {
+ "epoch": 0.4791347566503079,
+ "grad_norm": 0.7412364482879639,
+ "learning_rate": 0.00018765691479037376,
+ "loss": 1.7333,
+ "step": 479
+ },
+ {
+ "epoch": 0.4801350379794317,
+ "grad_norm": 0.6909674406051636,
+ "learning_rate": 0.00018760641709209583,
+ "loss": 1.6936,
+ "step": 480
+ },
+ {
+ "epoch": 0.4811353193085555,
+ "grad_norm": 0.6478050947189331,
+ "learning_rate": 0.0001875558231302091,
+ "loss": 1.4435,
+ "step": 481
+ },
+ {
+ "epoch": 0.48213560063767935,
+ "grad_norm": 0.6662638187408447,
+ "learning_rate": 0.00018750513296030718,
+ "loss": 1.5567,
+ "step": 482
+ },
+ {
+ "epoch": 0.4831358819668032,
+ "grad_norm": 0.6973413825035095,
+ "learning_rate": 0.00018745434663808942,
+ "loss": 1.6434,
+ "step": 483
+ },
+ {
+ "epoch": 0.48413616329592696,
+ "grad_norm": 0.8381956815719604,
+ "learning_rate": 0.0001874034642193608,
+ "loss": 1.8568,
+ "step": 484
+ },
+ {
+ "epoch": 0.4851364446250508,
+ "grad_norm": 0.6522262096405029,
+ "learning_rate": 0.0001873524857600319,
+ "loss": 1.4265,
+ "step": 485
+ },
+ {
+ "epoch": 0.4861367259541746,
+ "grad_norm": 0.7018440961837769,
+ "learning_rate": 0.00018730141131611882,
+ "loss": 1.6914,
+ "step": 486
+ },
+ {
+ "epoch": 0.48713700728329845,
+ "grad_norm": 0.8237236142158508,
+ "learning_rate": 0.00018725024094374315,
+ "loss": 1.4462,
+ "step": 487
+ },
+ {
+ "epoch": 0.4881372886124222,
+ "grad_norm": 0.6507928967475891,
+ "learning_rate": 0.00018719897469913184,
+ "loss": 1.5802,
+ "step": 488
+ },
+ {
+ "epoch": 0.48913756994154606,
+ "grad_norm": 0.8120100498199463,
+ "learning_rate": 0.00018714761263861728,
+ "loss": 1.7819,
+ "step": 489
+ },
+ {
+ "epoch": 0.4901378512706699,
+ "grad_norm": 0.759722888469696,
+ "learning_rate": 0.000187096154818637,
+ "loss": 1.7481,
+ "step": 490
+ },
+ {
+ "epoch": 0.4911381325997937,
+ "grad_norm": 0.7146822214126587,
+ "learning_rate": 0.00018704460129573391,
+ "loss": 1.6217,
+ "step": 491
+ },
+ {
+ "epoch": 0.4921384139289175,
+ "grad_norm": 0.7138429284095764,
+ "learning_rate": 0.00018699295212655596,
+ "loss": 1.7242,
+ "step": 492
+ },
+ {
+ "epoch": 0.4931386952580413,
+ "grad_norm": 0.8145613670349121,
+ "learning_rate": 0.00018694120736785632,
+ "loss": 1.8759,
+ "step": 493
+ },
+ {
+ "epoch": 0.49413897658716516,
+ "grad_norm": 0.6624435186386108,
+ "learning_rate": 0.00018688936707649304,
+ "loss": 1.8632,
+ "step": 494
+ },
+ {
+ "epoch": 0.49513925791628893,
+ "grad_norm": 0.6550843119621277,
+ "learning_rate": 0.00018683743130942928,
+ "loss": 1.7645,
+ "step": 495
+ },
+ {
+ "epoch": 0.49613953924541276,
+ "grad_norm": 0.6931488513946533,
+ "learning_rate": 0.00018678540012373302,
+ "loss": 1.5899,
+ "step": 496
+ },
+ {
+ "epoch": 0.4971398205745366,
+ "grad_norm": 0.684229850769043,
+ "learning_rate": 0.00018673327357657715,
+ "loss": 1.6902,
+ "step": 497
+ },
+ {
+ "epoch": 0.4981401019036604,
+ "grad_norm": 0.7380666136741638,
+ "learning_rate": 0.0001866810517252393,
+ "loss": 1.7115,
+ "step": 498
+ },
+ {
+ "epoch": 0.4991403832327842,
+ "grad_norm": 0.6782827973365784,
+ "learning_rate": 0.00018662873462710184,
+ "loss": 1.495,
+ "step": 499
+ },
+ {
+ "epoch": 0.5001406645619081,
+ "grad_norm": 0.730248749256134,
+ "learning_rate": 0.0001865763223396518,
+ "loss": 1.5147,
+ "step": 500
+ }
+ ],
+ "logging_steps": 1,
+ "max_steps": 2997,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 3,
+ "save_steps": 500,
+ "stateful_callbacks": {
+ "TrainerControl": {
+ "args": {
+ "should_epoch_stop": false,
+ "should_evaluate": false,
+ "should_log": false,
+ "should_save": true,
+ "should_training_stop": false
+ },
+ "attributes": {}
+ }
+ },
+ "total_flos": 4045941965881344.0,
+ "train_batch_size": 2,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/checkpoint-500/training_args.bin b/checkpoint-500/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..6831a6d623a8a2b84942bb5584c6aa5bc14eee51
--- /dev/null
+++ b/checkpoint-500/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5db131d6e82df60077bab037ec35113e1b0836a0bd72bb0a21e3fc0311a527de
+size 5304
diff --git a/runs/Jul24_03-40-20_5c574ba6a366/events.out.tfevents.1721792422.5c574ba6a366.1559.0 b/runs/Jul24_03-40-20_5c574ba6a366/events.out.tfevents.1721792422.5c574ba6a366.1559.0
new file mode 100644
index 0000000000000000000000000000000000000000..828b1ea07d905c44409ef7326aad49ce128f1c81
--- /dev/null
+++ b/runs/Jul24_03-40-20_5c574ba6a366/events.out.tfevents.1721792422.5c574ba6a366.1559.0
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:44d9d29c708c7c9ee1da6fc469318349e762e3fcf5480b61012f38743d778d56
+size 5295
diff --git a/runs/Jul24_03-51-06_5c574ba6a366/events.out.tfevents.1721793070.5c574ba6a366.1559.1 b/runs/Jul24_03-51-06_5c574ba6a366/events.out.tfevents.1721793070.5c574ba6a366.1559.1
new file mode 100644
index 0000000000000000000000000000000000000000..23b75c07f0159cafd8cc060b530bfd93b8563102
--- /dev/null
+++ b/runs/Jul24_03-51-06_5c574ba6a366/events.out.tfevents.1721793070.5c574ba6a366.1559.1
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9442463afc3f86248ebcb38678243ef810cb51f903cb67c1af421b945eb71884
+size 637508