geonmin-kim committed on
Commit
27d0e1c
1 Parent(s): d7a0c34

Upload folder using huggingface_hub

config.json CHANGED
@@ -33,8 +33,8 @@
   },
   "rope_theta": 500000.0,
   "tie_word_embeddings": false,
- "torch_dtype": "float32",
- "transformers_version": "4.43.1",
- "use_cache": false,
+ "torch_dtype": "float16",
+ "transformers_version": "4.45.2",
+ "use_cache": true,
   "vocab_size": 128256
 }
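The config change flips the declared checkpoint dtype from float32 to float16, re-enables the KV cache, and bumps the recorded transformers version. A minimal sketch of how these fields surface at load time, assuming a hypothetical repo id (the actual repository path is not part of this diff):

import torch
from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "geonmin-kim/example-model"  # hypothetical placeholder, not the real repo id

config = AutoConfig.from_pretrained(repo_id)
print(config.torch_dtype)  # should report float16 after this commit
print(config.use_cache)    # should report True after this commit

# torch_dtype in config.json is only a hint; pass the dtype explicitly to load
# the weights in fp16 instead of upcasting them to float32.
model = AutoModelForCausalLM.from_pretrained(repo_id, torch_dtype=torch.float16)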
generation_config.json CHANGED
@@ -6,5 +6,5 @@
   128008,
   128009
 ],
- "transformers_version": "4.43.1"
+ "transformers_version": "4.45.2"
 }
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dabc8af1c83419907ca8a129fbf9dbdec58eedc1fc4fcef87b15441680c3f3db
+ size 4282490318
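The committed pytorch_model.bin is a Git LFS pointer: the ~4.3 GB weight payload lives in LFS storage, and only the oid/size shown above are versioned in git. A minimal sketch, assuming the resolved (non-pointer) file has been downloaded locally, for checking it against the pointer:

import hashlib
import os

EXPECTED_SHA256 = "dabc8af1c83419907ca8a129fbf9dbdec58eedc1fc4fcef87b15441680c3f3db"
EXPECTED_SIZE = 4282490318

path = "pytorch_model.bin"  # assumed local path to the downloaded weights

# Hash in 1 MiB chunks so the large checkpoint never sits in memory at once.
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"
assert h.hexdigest() == EXPECTED_SHA256, "sha256 mismatch"
print("pytorch_model.bin matches the LFS pointer")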
special_tokens_map.json CHANGED
@@ -12,6 +12,5 @@
   "normalized": false,
   "rstrip": false,
   "single_word": false
- },
- "pad_token": "<|eot_id|>"
+ }
 }
tokenizer.json CHANGED
@@ -1,11 +1,6 @@
 {
   "version": "1.0",
- "truncation": {
- "direction": "Right",
- "max_length": 256,
- "strategy": "LongestFirst",
- "stride": 0
- },
+ "truncation": null,
   "padding": null,
   "added_tokens": [
   {
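With "truncation" set to null, the previous baked-in cap of 256 tokens (LongestFirst, right truncation) no longer applies automatically; truncation now has to be requested per call. A minimal sketch, again with a hypothetical repo id:

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("geonmin-kim/example-model")  # hypothetical
enc = tokenizer("some very long input ...", truncation=True, max_length=256)
print(len(enc["input_ids"]))  # at most 256 tokens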
tokenizer_config.json CHANGED
@@ -2058,6 +2058,5 @@
   "attention_mask"
 ],
 "model_max_length": 131072,
- "pad_token": "<|eot_id|>",
 "tokenizer_class": "PreTrainedTokenizerFast"
 }
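Together with the special_tokens_map.json change, this removes the explicit "<|eot_id|>" pad token, so the tokenizer now loads with pad_token unset and batched padding will fail until one is assigned. A minimal sketch, with a hypothetical repo id:

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("geonmin-kim/example-model")  # hypothetical

if tokenizer.pad_token is None:
    # Common Llama-style workaround: reuse the eos token (or the removed "<|eot_id|>") for padding.
    tokenizer.pad_token = tokenizer.eos_token

batch = tokenizer(["hello", "a longer example"], padding=True, return_tensors="pt")
print(batch["input_ids"].shape)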