arnocandel committed
Commit b894796 • 1 Parent(s): 946b3a1
Upload 12 files
Browse files
- config.json +9 -3
- generation_config.json +1 -1
- pytorch_model-00001-of-00005.bin +1 -1
- pytorch_model-00002-of-00005.bin +1 -1
- pytorch_model-00003-of-00005.bin +1 -1
- pytorch_model-00004-of-00005.bin +1 -1
- pytorch_model-00005-of-00005.bin +1 -1
- tokenizer.json +1 -0
config.json
CHANGED
@@ -1,15 +1,21 @@
 {
-  "_name_or_path": "
+  "_name_or_path": "h2oai/h2ogpt-oasst1-512-12b",
   "architectures": [
     "GPTNeoXForCausalLM"
   ],
+  "bos_token_id": 0,
+  "custom_pipeline": {
+    "text-generation": {
+      "impl": "h2oai_pipeline.H2OTextGenerationPipeline",
+      "pt": "AutoModelForCausalLM"
+    }
+  },
   "custom_pipelines": {
     "text-generation": {
       "impl": "h2oai_pipeline.H2OTextGenerationPipeline",
       "pt": "AutoModelForCausalLM"
     }
   },
-  "bos_token_id": 0,
   "eos_token_id": 0,
   "hidden_act": "gelu",
   "hidden_size": 5120,
@@ -24,7 +30,7 @@
   "rotary_pct": 0.25,
   "tie_word_embeddings": false,
   "torch_dtype": "float16",
-  "transformers_version": "4.28.
+  "transformers_version": "4.28.1",
   "use_cache": true,
   "use_parallel_residual": true,
   "vocab_size": 50688
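The custom_pipelines entry is what lets transformers substitute the repo's own pipeline class for the stock one. A minimal usage sketch, assuming the h2oai/h2ogpt-oasst1-512-12b repo ships h2oai_pipeline.py alongside these files; trust_remote_code=True is required for any repo-provided pipeline code:

from transformers import pipeline

# With trust_remote_code=True, the "text-generation" task resolves to
# h2oai_pipeline.H2OTextGenerationPipeline from the repo instead of the
# built-in TextGenerationPipeline; the model itself loads through
# AutoModelForCausalLM, as declared under "pt" in the config.
generate = pipeline(
    "text-generation",
    model="h2oai/h2ogpt-oasst1-512-12b",
    torch_dtype="auto",  # the config declares float16 weights
    trust_remote_code=True,
)
print(generate("Why is drinking water so healthy?", max_new_tokens=64))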
generation_config.json
CHANGED
@@ -2,5 +2,5 @@
   "_from_model_config": true,
   "bos_token_id": 0,
   "eos_token_id": 0,
-  "transformers_version": "4.28.
+  "transformers_version": "4.28.1"
 }
pytorch_model-00001-of-00005.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:7d21435593086b58709fd1598a039524ff6dacca18f996edd120aa05f6d1cbce
 size 4957630318
pytorch_model-00002-of-00005.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:aca3ba1593cc4dd0cfd529ec24c4fbf53481ad8e6d5ff9b81ca9d208f2fbedf8
 size 4853861544
pytorch_model-00003-of-00005.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:7ac87c950ae759c15bb50c40263bc26f10a9f106e07e127361c8c7635273f0d1
 size 4858068625
pytorch_model-00004-of-00005.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:29ad9d68225ceb2fe58373c64a38a8670fffc9e6794d51e2794c5c113e129e89
 size 5015385889
pytorch_model-00005-of-00005.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:9719e633aa21824604cc52e0cc4b586c173bb49efd3e2ac75fda0201be0cd66c
 size 4158379959
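These shard entries are Git LFS pointer files: the repository stores only a small pointer recording the object's sha256 and byte size, so re-uploading weights changes just that pointer. A sketch for checking a downloaded shard against its pointer (the local file path is an assumption about where you saved it):

import hashlib

# Expected digest copied from the first shard's pointer above.
EXPECTED = "7d21435593086b58709fd1598a039524ff6dacca18f996edd120aa05f6d1cbce"

h = hashlib.sha256()
with open("pytorch_model-00001-of-00005.bin", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)

assert h.hexdigest() == EXPECTED, "shard is corrupted or incomplete"
print("sha256 OK:", h.hexdigest())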
tokenizer.json
CHANGED
@@ -257,6 +257,7 @@
   "continuing_subword_prefix": null,
   "end_of_word_suffix": null,
   "fuse_unk": false,
+  "byte_fallback": false,
   "vocab": {
     "<|endoftext|>": 0,
     "<|padding|>": 1,
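The added byte_fallback field belongs to the tokenizer's BPE model block, likely because the file was re-saved with a newer tokenizers release that serializes it explicitly; false keeps the existing behavior (out-of-vocabulary bytes are not decomposed into <0xNN> byte tokens). A quick inspection sketch, assuming tokenizer.json from this commit is in the working directory:

import json

with open("tokenizer.json") as f:
    tok = json.load(f)

model = tok["model"]
print(model["type"])           # "BPE" for this GPT-NeoX-style tokenizer
print(model["byte_fallback"])  # False, the field added by this commit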