Upload folder using huggingface_hub (#2)
- Upload folder using huggingface_hub (04acda525caefb0dde05296ab5ab5dcb6185ec0d)
- tokenizer_config.json +11 -6
tokenizer_config.json CHANGED
@@ -1,6 +1,4 @@
 {
-  "add_bos_token": true,
-  "add_eos_token": false,
   "added_tokens_decoder": {
     "0": {
       "content": "<unk>",
@@ -25,10 +23,18 @@
       "rstrip": false,
       "single_word": false,
       "special": true
+    },
+    "32015": {
+      "content": "<step>",
+      "lstrip": true,
+      "normalized": false,
+      "rstrip": true,
+      "single_word": true,
+      "special": false
     }
   },
-  "additional_special_tokens": [],
   "bos_token": "<s>",
+  "chat_template": "{% if messages[0]['role'] == 'system' %}{% set user_index = 1 %}{% else %}{% set user_index = 0 %}{% endif %}{% for message in messages %}{% if (message['role'] == 'user') != ((loop.index0 + user_index) % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if loop.index0 == 0 %}{{ '<s>' }}{% endif %}{% set content = 'Source: ' + message['role'] + '\n\n ' + message['content'].strip() %}{{ content + ' <step> ' }}{% endfor %}{{'Source: assistant\nDestination: user\n\n '}}",
   "clean_up_tokenization_spaces": false,
   "eos_token": "</s>",
   "legacy": true,
@@ -38,6 +44,5 @@
   "spaces_between_special_tokens": false,
   "tokenizer_class": "LlamaTokenizer",
   "unk_token": "<unk>",
-  "use_default_system_prompt": false
-
-}
+  "use_default_system_prompt": false
+}
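For context, here is a minimal sketch of exercising the new "chat_template" and the new "<step>" token with transformers. The repository id "org/model" is a placeholder (this commit page does not name the repo), and the expected output in the comments is traced by hand from the Jinja template above rather than taken from a real run.

from transformers import AutoTokenizer

# Placeholder repo id -- substitute the repository this
# tokenizer_config.json actually belongs to.
tokenizer = AutoTokenizer.from_pretrained("org/model")

# The template enforces strictly alternating user/assistant turns,
# with an optional system message first.
messages = [
    {"role": "system", "content": "You are a helpful coding assistant."},
    {"role": "user", "content": "Write a function that reverses a string."},
]

# With tokenize=False this returns the rendered prompt string.
prompt = tokenizer.apply_chat_template(messages, tokenize=False)
print(prompt)
# Tracing the template by hand, this should print:
# <s>Source: system
#
#  You are a helpful coding assistant. <step> Source: user
#
#  Write a function that reverses a string. <step> Source: assistant
# Destination: user
#
#

# The token registered in added_tokens_decoder by this commit
# should resolve to its new id:
assert tokenizer.convert_tokens_to_ids("<step>") == 32015

Note that "<step>" is added with "special": false, so it is kept as ordinary text during decoding rather than being stripped the way special tokens like "<s>" are.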