# template used in production for HuggingChat.
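# MODELS is a backtick-quoted JSON array describing every model offered in the UI.
# For each entry, "chatPromptTemplate" is a Handlebars-style template (note the
# {{#ifUser}}/{{#ifAssistant}} blocks) that flattens the conversation into a single
# prompt, "parameters" are generation settings forwarded to the inference backend
# ("truncate" caps the prompt length in tokens, "max_new_tokens" caps the reply),
# and "promptExamples" are the suggestions shown on an empty chat screen.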
MODELS=`[
{
"name" : "mistralai/Mixtral-8x7B-Instruct-v0.1",
"description" : "The latest MoE model from Mistral AI! 8x7B and outperforms Llama 2 70B in most benchmarks.",
"websiteUrl" : "https://mistral.ai/news/mixtral-of-experts/",
"preprompt" : "",
"chatPromptTemplate": "<s> {{#each messages}}{{#ifUser}}[INST]{{#if @first}}{{#if @root.preprompt}}{{@root.preprompt}}\n{{/if}}{{/if}} {{content}} [/INST]{{/ifUser}}{{#ifAssistant}} {{content}}</s> {{/ifAssistant}}{{/each}}",
"parameters" : {
"temperature" : 0.6,
"top_p" : 0.95,
"repetition_penalty" : 1.2,
"top_k" : 50,
"truncate" : 24576,
"max_new_tokens" : 8192,
"stop" : ["</s>"]
},
"promptExamples" : [
{
"title": "Write an email from bullet list",
"prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
}, {
"title": "Code a snake game",
"prompt": "Code a basic snake game in python, give explanations for each step."
}, {
"title": "Assist in a task",
"prompt": "How do I make a delicious lemon cheesecake?"
}
]
},
{
"name": "meta-llama/Llama-2-70b-chat-hf",
"description": "The latest and biggest model from Meta, fine-tuned for chat.",
"websiteUrl": "https://ai.meta.com/llama/",
"preprompt": " ",
"chatPromptTemplate" : "<s>[INST] <<SYS>>\n{{preprompt}}\n<</SYS>>\n\n{{#each messages}}{{#ifUser}}{{content}} [/INST] {{/ifUser}}{{#ifAssistant}}{{content}} </s><s>[INST] {{/ifAssistant}}{{/each}}",
"promptExamples": [
{
"title": "Write an email from bullet list",
"prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
}, {
"title": "Code a snake game",
"prompt": "Code a basic snake game in python, give explanations for each step."
}, {
"title": "Assist in a task",
"prompt": "How do I make a delicious lemon cheesecake?"
}
],
"parameters": {
"temperature": 0.1,
"top_p": 0.95,
"repetition_penalty": 1.2,
"top_k": 50,
"truncate": 3072,
"max_new_tokens": 1024,
"stop" : ["</s>", "</s><s>[INST]"]
}
},
{
"name" : "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
"description" : "Nous Hermes 2 Mixtral 8x7B DPO is the new flagship Nous Research model trained over the Mixtral 8x7B MoE LLM.",
"websiteUrl" : "https://nousresearch.com/",
"chatPromptTemplate" : "{{#if @root.preprompt}}<|im_start|>system\n{{@root.preprompt}}<|im_end|>\n{{/if}}{{#each messages}}{{#ifUser}}<|im_start|>user\n{{content}}<|im_end|>\n<|im_start|>assistant\n{{/ifUser}}{{#ifAssistant}}{{content}}<|im_end|>\n{{/ifAssistant}}{{/each}}",
"promptExamples": [
{
"title": "Write an email from bullet list",
"prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
}, {
"title": "Code a snake game",
"prompt": "Code a basic snake game in python, give explanations for each step."
}, {
"title": "Assist in a task",
"prompt": "How do I make a delicious lemon cheesecake?"
}
],
"parameters": {
"temperature": 0.7,
"top_p": 0.95,
"repetition_penalty": 1,
"top_k": 50,
"truncate": 24576,
"max_new_tokens": 2048,
"stop": ["<|im_end|>"]
}
},
{
"name": "codellama/CodeLlama-70b-Instruct-hf",
"displayName": "codellama/CodeLlama-70b-Instruct-hf",
"description": "Code Llama, a state of the art code model from Meta. Now in 70B!",
"websiteUrl": "https://ai.meta.com/blog/code-llama-large-language-model-coding/",
"preprompt": "",
"chatPromptTemplate" : "<s>{{#if @root.preprompt}}Source: system\n\n {{@root.preprompt}} <step> {{/if}}{{#each messages}}{{#ifUser}}Source: user\n\n {{content}} <step> {{/ifUser}}{{#ifAssistant}}Source: assistant\n\n {{content}} <step> {{/ifAssistant}}{{/each}}Source: assistant\nDestination: user\n\n ",
"promptExamples": [
{
"title": "Fibonacci in Python",
"prompt": "Write a python function to calculate the nth fibonacci number."
}, {
"title": "JavaScript promises",
"prompt": "How can I wait for multiple JavaScript promises to fulfill before doing something with their values?"
}, {
"title": "Rust filesystem",
"prompt": "How can I load a file from disk in Rust?"
}
],
"parameters": {
"temperature": 0.1,
"top_p": 0.95,
"repetition_penalty": 1.2,
"top_k": 50,
"truncate": 4096,
"max_new_tokens": 4096,
"stop": ["<step>", " <step>", " <step> "],
}
},
{
"name": "mistralai/Mistral-7B-Instruct-v0.1",
"displayName": "mistralai/Mistral-7B-Instruct-v0.1",
"description": "Mistral 7B is a new Apache 2.0 model, released by Mistral AI that outperforms Llama2 13B in benchmarks.",
"websiteUrl": "https://mistral.ai/news/announcing-mistral-7b/",
"preprompt": "",
"chatPromptTemplate" : "<s>{{#each messages}}{{#ifUser}}[INST] {{#if @first}}{{#if @root.preprompt}}{{@root.preprompt}}\n{{/if}}{{/if}}{{content}} [/INST]{{/ifUser}}{{#ifAssistant}}{{content}}</s>{{/ifAssistant}}{{/each}}",
"parameters": {
"temperature": 0.1,
"top_p": 0.95,
"repetition_penalty": 1.2,
"top_k": 50,
"truncate": 3072,
"max_new_tokens": 1024,
"stop": ["</s>"]
},
"promptExamples": [
{
"title": "Write an email from bullet list",
"prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
}, {
"title": "Code a snake game",
"prompt": "Code a basic snake game in python, give explanations for each step."
}, {
"title": "Assist in a task",
"prompt": "How do I make a delicious lemon cheesecake?"
}
],
"unlisted": true
},
{
"name": "mistralai/Mistral-7B-Instruct-v0.2",
"displayName": "mistralai/Mistral-7B-Instruct-v0.2",
"description": "Mistral 7B is a new Apache 2.0 model, released by Mistral AI that outperforms Llama2 13B in benchmarks.",
"websiteUrl": "https://mistral.ai/news/announcing-mistral-7b/",
"preprompt": "",
"chatPromptTemplate" : "<s>{{#each messages}}{{#ifUser}}[INST] {{#if @first}}{{#if @root.preprompt}}{{@root.preprompt}}\n{{/if}}{{/if}}{{content}} [/INST]{{/ifUser}}{{#ifAssistant}}{{content}}</s>{{/ifAssistant}}{{/each}}",
"parameters": {
"temperature": 0.3,
"top_p": 0.95,
"repetition_penalty": 1.2,
"top_k": 50,
"truncate": 3072,
"max_new_tokens": 1024,
"stop": ["</s>"]
},
"promptExamples": [
{
"title": "Write an email from bullet list",
"prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
}, {
"title": "Code a snake game",
"prompt": "Code a basic snake game in python, give explanations for each step."
}, {
"title": "Assist in a task",
"prompt": "How do I make a delicious lemon cheesecake?"
}
]
},
{
"name": "openchat/openchat-3.5-0106",
"displayName": "openchat/openchat-3.5-0106",
"description": "OpenChat 3.5 is the #1 model on MT-Bench, with only 7B parameters.",
"websiteUrl": "https://huggingface.co/openchat/openchat-3.5-0106",
"preprompt": "",
"chatPromptTemplate" : "<s>{{#each messages}}{{#ifUser}}GPT4 Correct User: {{#if @first}}{{#if @root.preprompt}}{{@root.preprompt}}\n{{/if}}{{/if}}{{content}}<|end_of_turn|>GPT4 Correct Assistant:{{/ifUser}}{{#ifAssistant}}{{content}}<|end_of_turn|>{{/ifAssistant}}{{/each}}",
"parameters": {
"temperature": 0.6,
"top_p": 0.95,
"repetition_penalty": 1.2,
"top_k": 50,
"truncate": 6016,
"max_new_tokens": 2048,
"stop": ["<|end_of_turn|>"]
},
"promptExamples": [
{
"title": "Write an email from bullet list",
"prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
}, {
"title": "Code a snake game",
"prompt": "Code a basic snake game in python, give explanations for each step."
}, {
"title": "Assist in a task",
"prompt": "How do I make a delicious lemon cheesecake?"
}
]
}
]`
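# OLD_MODELS lists models that are no longer served; keeping them registered here
# presumably lets conversations that were started with them remain accessible.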
OLD_MODELS=`[
{"name":"bigcode/starcoder"},
{"name":"OpenAssistant/oasst-sft-6-llama-30b-xor"},
{"name":"HuggingFaceH4/zephyr-7b-alpha"},
{"name":"openchat/openchat_3.5"},
{"name":"openchat/openchat-3.5-1210"},
{"name": "tiiuae/falcon-180B-chat"},
{"name": "codellama/CodeLlama-34b-Instruct-hf"}
]`
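# TASK_MODEL is the model used for internal tasks (for example summarizing a
# conversation into a title or building web-search queries) rather than for chat replies.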
TASK_MODEL='mistralai/Mistral-7B-Instruct-v0.1'
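# The app is mounted under APP_BASE on PUBLIC_ORIGIN (i.e. https://huggingface.co/chat),
# while PUBLIC_SHARE_PREFIX is the shorter prefix used for shared-conversation links.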
APP_BASE="/chat"
PUBLIC_ORIGIN=https://huggingface.co
PUBLIC_SHARE_PREFIX=https://hf.co/chat
PUBLIC_ANNOUNCEMENT_BANNERS=`[]`
PUBLIC_APP_NAME=HuggingChat
PUBLIC_APP_ASSETS=huggingchat
PUBLIC_APP_COLOR=yellow
PUBLIC_APP_DESCRIPTION="Making the community's best AI chat models available to everyone."
PUBLIC_APP_DISCLAIMER_MESSAGE="Disclaimer: AI is an area of active research with known problems such as biased generation and misinformation. Do not use this application for high-stakes decisions or advice."
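# The two flags below are 0/1 booleans: PUBLIC_APP_DATA_SHARING likely controls the
# opt-in setting for sharing conversations with model authors, and
# PUBLIC_APP_DISCLAIMER toggles showing the disclaimer message defined above.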
PUBLIC_APP_DATA_SHARING=1
PUBLIC_APP_DISCLAIMER=1
RATE_LIMIT=16
MESSAGES_BEFORE_LOGIN=5 # how many messages a user can send in a conversation before having to log in. Set to 0 to force login right away.
PUBLIC_GOOGLE_ANALYTICS_ID=G-8Q63TH4CSL
PUBLIC_PLAUSIBLE_SCRIPT_URL="/js/script.js"
# Not part of the .env file but set as separate variables in the Space
# ADDRESS_HEADER=X-Forwarded-For
# XFF_DEPTH=2
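# ENABLE_ASSISTANTS turns on the community Assistants feature and EXPOSE_API exposes
# the conversation API endpoints (interpretation based on the flag names).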
ENABLE_ASSISTANTS=true
EXPOSE_API=true
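# Extra OAuth redirect URLs accepted at login in addition to the default callback;
# the custom huggingchat:// scheme below is presumably used by a native mobile client.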
ALTERNATIVE_REDIRECT_URLS=`[
huggingchat://login/callback
]`