# Configuration version (required)
version: 1.0.9
# Cache settings: Set to true to enable caching
cache: true
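# File storage strategy for uploads ("local" or "firebase"); "firebase" typically
# requires the corresponding Firebase credentials to be set as environment variables.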
fileStrategy: "firebase"
# Definition of custom endpoints
endpoints:
  custom:
    # Reverse proxy example (any OpenAI-compatible API)
    - name: "Reverse Proxy1"
      # For `apiKey` and `baseURL`, you can use environment variables that you define
      # (see the commented example below), or set them to "user_provided" so each user
      # supplies their own values in the UI.
      # Known issue: you should not use `OPENROUTER_API_KEY` as it will then override the `openAI` endpoint to use OpenRouter as well.
      apiKey: "user_provided"
      baseURL: "user_provided"
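      # Example using environment variables instead of "user_provided"
      # (hypothetical variable names; define them in your .env file):
      #   apiKey: "${MY_PROXY_API_KEY}"
      #   baseURL: "${MY_PROXY_BASE_URL}"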
      models:
        default: ["claude-3-5-sonnet-20240620", "claude-3-opus-20240229", "gpt-4-vision-preview", "gpt-4", "gpt-4o", "gpt-4-1106-preview", "gpt-4-0125-preview", "gpt-4-turbo", "gpt-3.5-turbo", "gpt-3.5-turbo-0613", "gpt-4-32k", "gpt-4-0314", "gpt-4-0613", "chatglm_pro", "chatglm_lite", "glm-4"]
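        # When `fetch` is true, LibreChat attempts to fetch the model list from the
        # endpoint's `/models` route; the `default` list above serves as the fallback.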
        fetch: true
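      # Optional per-endpoint settings (see the Custom Configuration Guide linked at the end of this file):
      #   titleConvo / titleModel: auto-generate conversation titles using the given model
      #   summarize / summaryModel: summarize conversations using the given model
      #   forcePrompt: send a completions-style `prompt` payload instead of `messages`
      #   modelDisplayLabel: label shown in the chat UI for this endpoint's responses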
      titleConvo: false
      titleModel: "gpt-3.5-turbo"
      summarize: false
      summaryModel: "gpt-3.5-turbo"
      forcePrompt: false
      modelDisplayLabel: "Custom1"
    # Daifuku reverse proxy example
    - name: "Daifuku"
      # `apiKey` and `baseURL`: same options and `OPENROUTER_API_KEY` caveat as the first endpoint above.
      apiKey: "user_provided"
      baseURL: "user_provided"
      models:
        default: ["gpt-4", "gpt-4o", "gpt-4-1106-preview", "gpt-4-0125-preview", "gpt-4-turbo-2024-04-09", "gpt-3.5-turbo", "gpt-3.5-turbo-0613", "gpt-3.5-turbo-16k", "gpt-4-vision-preview"]
        fetch: true
      titleConvo: true
      titleModel: "gpt-3.5-turbo"
      summarize: false
      summaryModel: "gpt-3.5-turbo"
      forcePrompt: false
      modelDisplayLabel: "daifuku"
    # SiliconFlow example (NVIDIA Nemotron model)
    - name: "Siliconflow Nvidia"
      apiKey: "user_provided"
      baseURL: "https://api.siliconflow.cn/v1"
      models:
        default: ["nvidia/Llama-3.1-Nemotron-70B-Instruct"]
        fetch: true
      titleConvo: true
      titleModel: "Qwen/Qwen2.5-7B-Instruct"
      summarize: false
      summaryModel: "Qwen/Qwen2.5-7B-Instruct"
      forcePrompt: false
      modelDisplayLabel: "siliconflow"
    # Second reverse proxy example (OpenAI-compatible API)
    - name: "Reverse Proxy2"
      apiKey: "user_provided"
      baseURL: "user_provided"
      models:
        default: ["gpt-3.5-turbo", "gpt-3.5-turbo-16k", "gpt-4", "gpt-4-0125-preview", "gpt-4-0613", "gpt-4-1106-preview", "gpt-4-turbo-2024-04-09", "gpt-4-turbo", "gpt-4o", "gpt-4o-mini-2024-07-18", "gpt-4o-mini", "chatgpt-4o-latest", "claude-3-5-sonnet-20240620", "claude-3-haiku-20240307", "o1-mini", "o1-mini-2024-09-12", "o1-preview", "o1-preview-2024-09-12", "claude-3-haiku", "claude-3-sonnet", "claude-3-opus", "claude-3-opus-20240229"]
        fetch: true
      titleConvo: false
      titleModel: "gpt-3.5-turbo"
      summarize: false
      summaryModel: "gpt-3.5-turbo"
      forcePrompt: false
      modelDisplayLabel: "Custom2"
# See the Custom Configuration Guide for more information:
# https://docs.librechat.ai/install/configuration/custom_config.html
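# Note: this file is typically saved as `librechat.yaml` in the project root (or mounted
# into the container); restart LibreChat after editing so the changes take effect.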