Guru-25 committed on
Commit ced84a5 · verified · 1 Parent(s): fc37186

Update librechat.yaml

Files changed (1): librechat.yaml (+17 -16)
librechat.yaml CHANGED
@@ -1,4 +1,4 @@
-version: 1.2.1
+version: 1.2.3
 
 cache: true
 
@@ -32,6 +32,18 @@ modelSpecs:
         topK: 40
         topP: 0.95
         maxOutputTokens: 8192
+    - name: "deepseek-v3"
+      label: "DeepSeek V3 - 50"
+      iconURL: "https://cdn.jsdelivr.net/gh/Guru-25/Nothing/librechat/deepseek.svg"
+      preset:
+        modelLabel: "DeepSeek V3"
+        endpoint: "Github Models"
+        model: "deepseek-v3"
+        max_tokens: 2048
+        temperature: 0.8
+        top_p: 0.1
+        presence_penalty: 0
+        frequency_penalty: 0
     - name: "o3-mini"
       label: "o3-mini - 12"
       iconURL: "https://cdn.jsdelivr.net/gh/Guru-25/Nothing/librechat/chatgpt.png"
@@ -39,7 +51,7 @@ modelSpecs:
         modelLabel: "o3-mini"
         endpoint: "Github Models"
         model: "o3-mini"
-        max_completion_tokens: 100000
+        max_tokens: 100000
         reasoning_effort: "high"
     - name: "o1"
       label: "o1 - 8*"
@@ -48,7 +60,7 @@ modelSpecs:
         modelLabel: "o1"
        endpoint: "Github Models"
         model: "o1"
-        max_completion_tokens: 40000
+        max_tokens: 40000
         reasoning_effort: "high"
     - name: "deepseek-r1"
       label: "DeepSeek R1 - 8"
@@ -58,18 +70,6 @@ modelSpecs:
         endpoint: "Github Models"
         model: "deepseek-r1"
         max_tokens: 2048
-    - name: "deepseek-v3"
-      label: "DeepSeek V3 - 50*"
-      iconURL: "https://cdn.jsdelivr.net/gh/Guru-25/Nothing/librechat/deepseek.svg"
-      preset:
-        modelLabel: "DeepSeek V3"
-        endpoint: "Github Models"
-        model: "deepseek-v3"
-        max_completion_tokens: 2048
-        temperature: 0.8
-        top_p: 0.1
-        presence_penalty: 0
-        frequency_penalty: 0
     - name: "deepseek-r1-openrouter"
       label: "DeepSeek R1 (OpenRouter) - 200*"
       iconURL: "https://cdn.jsdelivr.net/gh/Guru-25/Nothing/librechat/deepseek.svg"
@@ -123,8 +123,9 @@ endpoints:
       models:
         default:
           - deepseek/deepseek-r1:free
+          - google/gemini-2.0-flash-exp:free
       titleConvo: true
-      titleModel: "google/gemini-2.0-pro-exp-02-05:free"
+      titleModel: "google/gemini-2.0-flash-exp:free"
 
       # groq
       # Model list: https://console.groq.com/settings/limits
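
For reference, a minimal sketch of how the touched parts of librechat.yaml read after this commit. It is reconstructed only from the hunks above; the modelSpecs.list / endpoints.custom nesting, the exact indentation, and any key not shown in the diff (for example the OpenRouter endpoint's name, apiKey and baseURL) are assumptions based on the usual LibreChat config layout, not part of this commit.

# Sketch only: reconstructed from the diff; keys and indentation outside the hunks are assumed.
version: 1.2.3

cache: true

modelSpecs:
  list:
    # ...existing spec whose preset ends with topK / topP / maxOutputTokens...
    - name: "deepseek-v3"            # moved here from below deepseek-r1
      label: "DeepSeek V3 - 50"
      iconURL: "https://cdn.jsdelivr.net/gh/Guru-25/Nothing/librechat/deepseek.svg"
      preset:
        modelLabel: "DeepSeek V3"
        endpoint: "Github Models"
        model: "deepseek-v3"
        max_tokens: 2048             # was max_completion_tokens
        temperature: 0.8
        top_p: 0.1
        presence_penalty: 0
        frequency_penalty: 0
    - name: "o3-mini"
      label: "o3-mini - 12"
      iconURL: "https://cdn.jsdelivr.net/gh/Guru-25/Nothing/librechat/chatgpt.png"
      preset:
        modelLabel: "o3-mini"
        endpoint: "Github Models"
        model: "o3-mini"
        max_tokens: 100000           # was max_completion_tokens
        reasoning_effort: "high"
    # ...the o1 spec gets the same max_completion_tokens -> max_tokens rename;
    #    deepseek-r1 and deepseek-r1-openrouter are unchanged...

endpoints:
  custom:
    - name: "OpenRouter"             # assumed; apiKey / baseURL are not in the diff
      models:
        default:
          - deepseek/deepseek-r1:free
          - google/gemini-2.0-flash-exp:free
      titleConvo: true
      titleModel: "google/gemini-2.0-flash-exp:free"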