Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -3,18 +3,26 @@ import gradio as gr
|
|
3 |
from openai import OpenAI
|
4 |
from typing import List, Tuple
|
5 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
6 |
# Define available models
|
7 |
AVAILABLE_MODELS = {
|
8 |
-
"
|
9 |
-
"
|
|
|
|
|
|
|
|
|
10 |
}
|
11 |
|
12 |
-
PX_ENDPOINT_URL = "https://api.perplexity.ai"
|
13 |
-
PX_API_KEY = os.getenv('PX_KEY')
|
14 |
-
PASSWORD = os.getenv("PASSWD") # Store the password in an environment variable
|
15 |
-
|
16 |
-
px_client = OpenAI(base_url=PX_ENDPOINT_URL, api_key=PX_API_KEY)
|
17 |
-
|
18 |
def respond(
|
19 |
message: str,
|
20 |
history: List[Tuple[str, str]],
|
@@ -40,9 +48,11 @@ def respond(
|
|
40 |
response = ""
|
41 |
citations = []
|
42 |
|
|
|
|
|
43 |
try:
|
44 |
-
stream =
|
45 |
-
model=AVAILABLE_MODELS[model_choice],
|
46 |
messages=messages,
|
47 |
max_tokens=max_tokens,
|
48 |
temperature=temperature,
|
|
|
3 |
from openai import OpenAI
|
4 |
from typing import List, Tuple
|
5 |
|
6 |
+
CLIENTS = [
|
7 |
+
"perplexity":{"key":os.getenv('PX_KEY'),"endpoint":"https://api.perplexity.ai"},
|
8 |
+
"hyperbolic":{"key":os.getenv('HYPERBOLIC_XYZ_KEY'),"endpoint":"https://api.hyperbolic.xyz/v1"},
|
9 |
+
"huggingface":{"key":os.getenv('HF_KEY'),"endpoint":"https://huggingface.co/api/inference-proxy/together"},
|
10 |
+
]
|
11 |
+
for client_type in CLIENTS:
|
12 |
+
CLIENTS[client_type]["client"] = OpenAI(base_url=CLIENTS[client_type]["endpoint"], api_key=CLIENTS[client_type]["key"])
|
13 |
+
|
14 |
+
PASSWORD = os.getenv("PASSWD")
|
15 |
+
|
# Models the UI can select, as (display name, backend model id, provider) rows.
# The provider field must match a key of CLIENTS.
_MODEL_SPECS = [
    ("DeepSeek V3 (Hyperbolic.xyz)", "deepseek-ai/DeepSeek-V3", "hyperbolic"),
    ("DeepSeek V3 (HuggingFace.co)", "deepseek-ai/DeepSeek-V3", "huggingface"),
    ("Llama3.3-70b-Instruct", "meta-llama/Llama-3.3-70B-Instruct", "hyperbolic"),
    ("Llama3.1-8b-Instruct", "meta-llama/Meta-Llama-3.1-8B-Instruct", "hyperbolic"),
    ("Sonar Pro", "sonar-pro", "perplexity"),
    ("Sonar", "sonar", "perplexity"),
]

# Display name -> {"model_name": backend id, "type": provider}; insertion
# order (and therefore UI order) follows _MODEL_SPECS.
AVAILABLE_MODELS = {
    display: {"model_name": model_id, "type": provider}
    for display, model_id, provider in _MODEL_SPECS
}
|
25 |
|
|
|
|
|
|
|
|
|
|
|
|
|
26 |
def respond(
|
27 |
message: str,
|
28 |
history: List[Tuple[str, str]],
|
|
|
48 |
response = ""
|
49 |
citations = []
|
50 |
|
51 |
+
selected_client = CLIENTS[AVAILABLE_MODELS[model_choice]["type"]]
|
52 |
+
|
53 |
try:
|
54 |
+
stream = selected_client.chat.completions.create(
|
55 |
+
model=AVAILABLE_MODELS[model_choice][model_name],
|
56 |
messages=messages,
|
57 |
max_tokens=max_tokens,
|
58 |
temperature=temperature,
|