AWQ model commit

Files changed:
- eval_results.json                  +1 -0
- generation_config.json             +1 -1
- model-00001-of-00003.safetensors   +1 -1
- model-00002-of-00003.safetensors   +1 -1
- model-00003-of-00003.safetensors   +1 -1
- tokenizer_config.json              +1 -0
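The commit publishes an AWQ-quantized checkpoint; with a recent transformers release (plus the autoawq and accelerate packages installed) it loads like any other causal-LM repository, since transformers reads the quantization settings stored with the checkpoint. A minimal sketch, using a placeholder repository id:

from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "org/awq-model"  # placeholder; substitute the actual Hub path of this repository

tokenizer = AutoTokenizer.from_pretrained(repo_id)
# device_map="auto" lets accelerate place the quantized shards across available devices.
model = AutoModelForCausalLM.from_pretrained(repo_id, device_map="auto")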
eval_results.json
ADDED
@@ -0,0 +1 @@
+[]
generation_config.json
CHANGED
@@ -2,5 +2,5 @@
   "_from_model_config": true,
   "bos_token_id": 1,
   "eos_token_id": 2,
-  "transformers_version": "4.
+  "transformers_version": "4.37.0.dev0"
 }
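This file only pins the special-token ids and records the transformers version that wrote it; at load time transformers exposes it through GenerationConfig and passes those ids to generate(). A minimal sketch, with the repository id again a placeholder:

from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("org/awq-model")  # placeholder repo id

print(gen_config.bos_token_id)  # 1, as in the diff above
print(gen_config.eos_token_id)  # 2, as in the diff above
# The config can then be handed to model.generate(..., generation_config=gen_config).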
model-00001-of-00003.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:2f46792b14469576eb220f9d54599d3cd790dbd1bd4e1b50113dbfe5537474e3
 size 9973295104
model-00002-of-00003.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:f7aee04e93522c6f90e2dccb6d1f232684458b9164990dce51a96a1e6d439633
 size 9977085640
model-00003-of-00003.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:699737875e4dbe17eac83ade10f0284e6ce3559f9e5ef9e8e853718a1fde5a32
 size 4703607368
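The three .safetensors entries are Git LFS pointer files: the commit fills in the real SHA-256 oid and byte size of each weight shard. A downloaded shard can be checked against its pointer with plain hashlib; a minimal sketch, assuming the shard sits in the current directory:

import hashlib
import os

def verify_shard(path: str, expected_oid: str, expected_size: int) -> bool:
    """Compare a local shard against its Git LFS pointer (sha256 oid + size)."""
    if os.path.getsize(path) != expected_size:
        return False
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            h.update(chunk)
    return h.hexdigest() == expected_oid

print(verify_shard(
    "model-00001-of-00003.safetensors",
    "2f46792b14469576eb220f9d54599d3cd790dbd1bd4e1b50113dbfe5537474e3",
    9973295104,
))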
tokenizer_config.json
CHANGED
@@ -45,6 +45,7 @@
   },
   "additional_special_tokens": [],
   "bos_token": "<s>",
+  "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
   "clean_up_tokenization_spaces": false,
   "eos_token": "<|im_end|>",
   "legacy": true,
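The added chat_template is a ChatML-style Jinja template (<|im_start|>role ... <|im_end|>), so conversations can be formatted with apply_chat_template instead of hand-built prompt strings. A minimal sketch, with the repository id as a placeholder:

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("org/awq-model")  # placeholder repo id

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]

# add_generation_prompt=True appends the trailing "<|im_start|>assistant\n"
# defined by the template, so generation continues with the assistant turn.
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)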