config.yaml — CHANGED (+5 −5)

```diff
@@ -2,23 +2,23 @@ model_list:
   - model_name: gpt-4o
     litellm_params:
       model: github/gpt-4o
-      api_base: https://models.inference.ai.azure.com
       api_key: "os.environ/GITHUB_API_KEY"
   - model_name: gpt-4o-mini
     litellm_params:
       model: github/gpt-4o-mini
-      api_base: https://models.inference.ai.azure.com
       api_key: "os.environ/GITHUB_API_KEY"
   - model_name: meta-llama-3.1-405b-instruct
     litellm_params:
       model: github/meta-llama-3.1-405b-instruct
-      api_base: https://models.inference.ai.azure.com
       api_key: "os.environ/GITHUB_API_KEY"
   - model_name: meta-llama-3.1-8b-instruct
     litellm_params:
       model: github/meta-llama-3.1-8b-instruct
-      api_base: https://models.inference.ai.azure.com
       api_key: "os.environ/GITHUB_API_KEY"
 
+general_settings:
+  proxy_batch_write_at: 60
+
 litellm_settings:
-
+  set_verbose: False
+  json_logs: True
```

Summary: the change drops the explicit `api_base` override from all four GitHub-hosted models (falling back to the provider default), adds a `general_settings` section with `proxy_batch_write_at: 60`, and populates the previously empty `litellm_settings` section with `set_verbose: False` and `json_logs: True`.