# Configuration version (required)
version: 1.0.9

# Cache settings: Set to true to enable caching
cache: true
fileStrategy: "firebase"
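# fileStrategy selects where user-uploaded files are stored; if this setting is
# omitted, local storage is used instead of Firebase.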

# Definition of custom endpoints
endpoints:
  custom:
  
    # Reverse proxy example (based on the OpenRouter.ai configuration)
    - name: "Reverse Proxy"
      # For `apiKey` and `baseURL`, use "user_provided" to have each user supply
      # their own credentials, or reference environment variables that you define.
      # Known issue: do not use `OPENROUTER_API_KEY`, as it will also cause the
      # `openAI` endpoint to route through OpenRouter.
      apiKey: "user_provided"
      baseURL: "user_provided"
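      # For example, assuming an OPENROUTER_KEY variable is defined in your .env,
      # the two fields above could instead be written as:
      # apiKey: "${OPENROUTER_KEY}"
      # baseURL: "https://openrouter.ai/api/v1"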
      models:
        default: ["claude-3-5-sonnet-20240620", "claude-3-opus-20240229", "gpt-4-vision-preview", "gpt-4", "gpt-4o", "gpt-4-1106-preview", "gpt-4-0125-preview", "gpt-4-turbo", "gpt-3.5-turbo", "gpt-3.5-turbo-0613", "gpt-4-32k", "gpt-4-0314", "gpt-4-0613", "chatglm_pro", "chatglm_lite", "glm-4"]
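        # With fetch set to true, the model list is retrieved from the endpoint's
        # /models route when available; otherwise the `default` list above is used.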
        fetch: true
      titleConvo: false
      titleModel: "gpt-3.5-turbo"
      summarize: false
      summaryModel: "gpt-3.5-turbo"
      forcePrompt: false
      modelDisplayLabel: "AI"

    - name: "Daifuku"
      # For `apiKey` and `baseURL`, use "user_provided" or reference environment
      # variables that you define; the `OPENROUTER_API_KEY` caveat noted on the
      # endpoint above applies here as well.
      apiKey: "user_provided"
      baseURL: "user_provided"
      models:
        default: ["gpt-4", "gpt-4o", "gpt-4-1106-preview", "gpt-4-0125-preview", "gpt-4-turbo-2024-04-09", "gpt-3.5-turbo", "gpt-3.5-turbo-0613", "gpt-3.5-turbo-16k", "gpt-4-vision-preview"]
        fetch: true
      titleConvo: true
      titleModel: "gpt-3.5-turbo"
      summarize: false
      summaryModel: "gpt-3.5-turbo"
      forcePrompt: false
      modelDisplayLabel: "daifuku"


# See the Custom Configuration Guide for more information:
# https://docs.librechat.ai/install/configuration/custom_config.html