Update app.py
Browse files
app.py
CHANGED
@@ -74,6 +74,7 @@ COHERE_API_KEY = os.environ.get("COHERE_API_KEY", "")
|
|
74 |
HF_API_KEY = os.environ.get("HF_API_KEY", "")
|
75 |
TOGETHER_API_KEY = os.environ.get("TOGETHER_API_KEY", "")
|
76 |
GOOGLEAI_API_KEY = os.environ.get("GOOGLEAI_API_KEY", "")
|
|
|
77 |
|
78 |
# Print application startup message with timestamp
|
79 |
current_time = time.strftime("%Y-%m-%d %H:%M:%S")
|
@@ -353,6 +354,25 @@ GOOGLEAI_MODELS = {
|
|
353 |
"gemini-2.5-pro": 2000000,
|
354 |
}
|
355 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
356 |
|
357 |
# Add all models with "vl", "vision", "visual" in their name to HF vision models
|
358 |
for model_name in list(HUGGINGFACE_MODELS.keys()):
|
@@ -681,6 +701,95 @@ def update_model_info(provider, model_name):
|
|
681 |
# API HANDLERS
|
682 |
# ==========================================================
|
683 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
684 |
def call_openrouter_api(payload, api_key_override=None):
|
685 |
"""Make a call to OpenRouter API with error handling"""
|
686 |
try:
|
@@ -894,6 +1003,16 @@ def extract_ai_response(result, provider):
|
|
894 |
elif provider == "OpenAI":
|
895 |
if hasattr(result, "choices") and len(result.choices) > 0:
|
896 |
return result.choices[0].message.content
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
897 |
|
898 |
elif provider == "HuggingFace":
|
899 |
return result.get("generated_text", "")
|
@@ -1462,6 +1581,44 @@ def ask_ai(message, history, provider, model_choice, temperature, max_tokens, to
|
|
1462 |
{"role": "user", "content": message},
|
1463 |
{"role": "assistant", "content": error_message}
|
1464 |
]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1465 |
|
1466 |
elif provider == "OpenAI":
|
1467 |
# Process OpenAI similarly as above...
|
@@ -1886,7 +2043,7 @@ def create_app():
|
|
1886 |
gr.Markdown("""
|
1887 |
# 🤖 CrispChat
|
1888 |
|
1889 |
-
Chat with AI models from multiple providers: OpenRouter, OpenAI, HuggingFace, Groq, Cohere, Together,
|
1890 |
""")
|
1891 |
|
1892 |
with gr.Row():
|
@@ -1946,9 +2103,9 @@ def create_app():
|
|
1946 |
with gr.Group(elem_classes="provider-selection"):
|
1947 |
gr.Markdown("### Provider Selection")
|
1948 |
|
1949 |
-
# Provider selection
|
1950 |
provider_choice = gr.Radio(
|
1951 |
-
choices=["OpenRouter", "OpenAI", "HuggingFace", "Groq", "Cohere", "Together", "
|
1952 |
value="OpenRouter",
|
1953 |
label="AI Provider"
|
1954 |
)
|
@@ -1999,6 +2156,14 @@ def create_app():
|
|
1999 |
value=TOGETHER_API_KEY if TOGETHER_API_KEY else ""
|
2000 |
)
|
2001 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
2002 |
googleai_api_key = gr.Textbox(
|
2003 |
placeholder="Enter Google AI API key",
|
2004 |
label="Google AI API Key",
|
@@ -2059,25 +2224,18 @@ def create_app():
|
|
2059 |
|
2060 |
together_model = gr.Dropdown(
|
2061 |
choices=list(TOGETHER_MODELS.keys()),
|
2062 |
-
value="meta-llama/Llama-3.1-8B-Instruct" if "meta-llama/Llama-3.1-8B-Instruct" in TOGETHER_MODELS else None,
|
2063 |
label="Together Model",
|
2064 |
elem_id="together-model-choice",
|
2065 |
visible=False
|
2066 |
)
|
2067 |
|
2068 |
-
|
2069 |
-
|
2070 |
-
|
2071 |
-
|
2072 |
-
|
2073 |
-
|
2074 |
-
)
|
2075 |
-
|
2076 |
-
cerebras_model = gr.Dropdown(
|
2077 |
-
choices=list(CEREBRAS_MODELS.keys()),
|
2078 |
-
value="cerebras/llama-3.1-8b" if "cerebras/llama-3.1-8b" in CEREBRAS_MODELS else None,
|
2079 |
-
label="Cerebras Model",
|
2080 |
-
elem_id="cerebras-model-choice",
|
2081 |
visible=False
|
2082 |
)
|
2083 |
|
@@ -2253,8 +2411,7 @@ def create_app():
|
|
2253 |
- **Groq**: High-performance inference, requires API key
|
2254 |
- **Cohere**: Specialized in language understanding, requires API key
|
2255 |
- **Together**: Access to high-quality open models, requires API key
|
2256 |
-
- **
|
2257 |
-
- **Cerebras**: Free tier available with 8K context limit
|
2258 |
- **GoogleAI**: Google's Gemini models, requires API key
|
2259 |
|
2260 |
## Advanced Parameters
|
@@ -2267,7 +2424,7 @@ def create_app():
|
|
2267 |
# Add a footer with version info
|
2268 |
footer_md = gr.Markdown("""
|
2269 |
---
|
2270 |
-
### CrispChat v1.
|
2271 |
Built with ❤️ using Gradio and multiple AI provider APIs | Context sizes shown next to model names
|
2272 |
""")
|
2273 |
|
@@ -2281,12 +2438,11 @@ def create_app():
|
|
2281 |
groq_model: gr.update(visible=(provider == "Groq")),
|
2282 |
cohere_model: gr.update(visible=(provider == "Cohere")),
|
2283 |
together_model: gr.update(visible=(provider == "Together")),
|
2284 |
-
|
2285 |
-
cerebras_model: gr.update(visible=(provider == "Cerebras")),
|
2286 |
googleai_model: gr.update(visible=(provider == "GoogleAI"))
|
2287 |
}
|
2288 |
|
2289 |
-
def update_context_for_provider(provider, openrouter_model, openai_model, hf_model, groq_model, cohere_model, together_model,
|
2290 |
"""Update context display based on selected provider and model"""
|
2291 |
if provider == "OpenRouter":
|
2292 |
return update_context_display(provider, openrouter_model)
|
@@ -2300,15 +2456,13 @@ def create_app():
|
|
2300 |
return update_context_display(provider, cohere_model)
|
2301 |
elif provider == "Together":
|
2302 |
return update_context_display(provider, together_model)
|
2303 |
-
elif provider == "
|
2304 |
-
return update_context_display(provider,
|
2305 |
-
elif provider == "Cerebras":
|
2306 |
-
return update_context_display(provider, cerebras_model)
|
2307 |
elif provider == "GoogleAI":
|
2308 |
return update_context_display(provider, googleai_model)
|
2309 |
return "Unknown"
|
2310 |
|
2311 |
-
def update_model_info_for_provider(provider, openrouter_model, openai_model, hf_model, groq_model, cohere_model, together_model,
|
2312 |
"""Update model info based on selected provider and model"""
|
2313 |
if provider == "OpenRouter":
|
2314 |
return update_model_info(provider, openrouter_model)
|
@@ -2322,10 +2476,8 @@ def create_app():
|
|
2322 |
return update_model_info(provider, cohere_model)
|
2323 |
elif provider == "Together":
|
2324 |
return update_model_info(provider, together_model)
|
2325 |
-
elif provider == "
|
2326 |
-
return update_model_info(provider,
|
2327 |
-
elif provider == "Cerebras":
|
2328 |
-
return update_model_info(provider, cerebras_model)
|
2329 |
elif provider == "GoogleAI":
|
2330 |
return update_model_info(provider, googleai_model)
|
2331 |
return "<p>Model information not available</p>"
|
@@ -2432,35 +2584,18 @@ def create_app():
|
|
2432 |
default_model = "meta-llama/Llama-3.1-8B-Instruct" if "meta-llama/Llama-3.1-8B-Instruct" in all_models else all_models[0] if all_models else None
|
2433 |
return gr.update(choices=all_models, value=default_model)
|
2434 |
|
2435 |
-
def
|
2436 |
-
"""Filter
|
2437 |
-
all_models = list(
|
2438 |
-
if not search_term:
|
2439 |
-
default_model = "ovh/llama-3.1-8b-instruct" if "ovh/llama-3.1-8b-instruct" in all_models else all_models[0] if all_models else None
|
2440 |
-
return gr.update(choices=all_models, value=default_model)
|
2441 |
-
|
2442 |
-
filtered_models = [model for model in all_models if search_term.lower() in model.lower()]
|
2443 |
-
|
2444 |
-
if filtered_models:
|
2445 |
-
return gr.update(choices=filtered_models, value=filtered_models[0])
|
2446 |
-
else:
|
2447 |
-
default_model = "ovh/llama-3.1-8b-instruct" if "ovh/llama-3.1-8b-instruct" in all_models else all_models[0] if all_models else None
|
2448 |
-
return gr.update(choices=all_models, value=default_model)
|
2449 |
-
|
2450 |
-
def search_cerebras_models(search_term):
|
2451 |
-
"""Filter Cerebras models based on search term"""
|
2452 |
-
all_models = list(CEREBRAS_MODELS.keys())
|
2453 |
if not search_term:
|
2454 |
-
|
2455 |
-
return gr.update(choices=all_models, value=default_model)
|
2456 |
|
2457 |
filtered_models = [model for model in all_models if search_term.lower() in model.lower()]
|
2458 |
|
2459 |
if filtered_models:
|
2460 |
return gr.update(choices=filtered_models, value=filtered_models[0])
|
2461 |
else:
|
2462 |
-
|
2463 |
-
return gr.update(choices=all_models, value=default_model)
|
2464 |
|
2465 |
def search_googleai_models(search_term):
|
2466 |
"""Filter GoogleAI models based on search term"""
|
@@ -2478,26 +2613,25 @@ def create_app():
|
|
2478 |
return gr.update(choices=all_models, value=default_model)
|
2479 |
|
2480 |
|
2481 |
-
def get_current_model(provider, openrouter_model, openai_model, hf_model, groq_model, cohere_model,
|
|
|
2482 |
"""Get the currently selected model based on provider"""
|
2483 |
if provider == "OpenRouter":
|
2484 |
-
return openrouter_model
|
2485 |
elif provider == "OpenAI":
|
2486 |
-
return openai_model
|
2487 |
elif provider == "HuggingFace":
|
2488 |
-
return hf_model
|
2489 |
elif provider == "Groq":
|
2490 |
-
return groq_model
|
2491 |
elif provider == "Cohere":
|
2492 |
-
return cohere_model
|
2493 |
elif provider == "Together":
|
2494 |
-
return together_model
|
2495 |
-
elif provider == "
|
2496 |
-
return
|
2497 |
-
elif provider == "Cerebras":
|
2498 |
-
return cerebras_model if cerebras_model else "cerebras/llama-3.1-8b" if "cerebras/llama-3.1-8b" in CEREBRAS_MODELS else None
|
2499 |
elif provider == "GoogleAI":
|
2500 |
-
return googleai_model
|
2501 |
return None
|
2502 |
|
2503 |
|
@@ -2519,29 +2653,28 @@ def create_app():
|
|
2519 |
groq_model,
|
2520 |
cohere_model,
|
2521 |
together_model,
|
2522 |
-
|
2523 |
-
cerebras_model,
|
2524 |
googleai_model
|
2525 |
]
|
2526 |
).then(
|
2527 |
fn=update_context_for_provider,
|
2528 |
-
inputs=[provider_choice, openrouter_model, openai_model, hf_model, groq_model, cohere_model, together_model,
|
2529 |
outputs=context_display
|
2530 |
).then(
|
2531 |
fn=update_model_info_for_provider,
|
2532 |
-
inputs=[provider_choice, openrouter_model, openai_model, hf_model, groq_model, cohere_model, together_model,
|
2533 |
outputs=model_info_display
|
2534 |
).then(
|
2535 |
fn=lambda provider, model: update_vision_indicator(
|
2536 |
provider,
|
2537 |
-
get_current_model(provider, model, None, None, None, None, None, None, None
|
2538 |
),
|
2539 |
inputs=[provider_choice, openrouter_model],
|
2540 |
outputs=is_vision_indicator
|
2541 |
).then(
|
2542 |
fn=lambda provider, model: update_image_upload_visibility(
|
2543 |
provider,
|
2544 |
-
get_current_model(provider, model, None, None, None, None, None, None, None
|
2545 |
),
|
2546 |
inputs=[provider_choice, openrouter_model],
|
2547 |
outputs=image_upload_container
|
@@ -2556,14 +2689,13 @@ def create_app():
|
|
2556 |
search_groq_models(search) if provider == "Groq" else gr.update(),
|
2557 |
search_cohere_models(search) if provider == "Cohere" else gr.update(),
|
2558 |
search_together_models(search) if provider == "Together" else gr.update(),
|
2559 |
-
|
2560 |
-
search_cerebras_models(search) if provider == "Cerebras" else gr.update(),
|
2561 |
search_googleai_models(search) if provider == "GoogleAI" else gr.update()
|
2562 |
],
|
2563 |
inputs=[provider_choice, model_search],
|
2564 |
outputs=[
|
2565 |
openrouter_model, openai_model, hf_model, groq_model,
|
2566 |
-
cohere_model, together_model,
|
2567 |
]
|
2568 |
)
|
2569 |
|
@@ -2676,39 +2808,21 @@ def create_app():
|
|
2676 |
outputs=image_upload_container
|
2677 |
)
|
2678 |
|
2679 |
-
|
2680 |
-
fn=lambda model: update_context_display("
|
2681 |
-
inputs=
|
2682 |
-
outputs=context_display
|
2683 |
-
).then(
|
2684 |
-
fn=lambda model: update_model_info("OVH", model),
|
2685 |
-
inputs=ovh_model,
|
2686 |
-
outputs=model_info_display
|
2687 |
-
).then(
|
2688 |
-
fn=lambda model: update_vision_indicator("OVH", model),
|
2689 |
-
inputs=ovh_model,
|
2690 |
-
outputs=is_vision_indicator
|
2691 |
-
).then(
|
2692 |
-
fn=lambda model: update_image_upload_visibility("OVH", model),
|
2693 |
-
inputs=ovh_model,
|
2694 |
-
outputs=image_upload_container
|
2695 |
-
)
|
2696 |
-
|
2697 |
-
cerebras_model.change(
|
2698 |
-
fn=lambda model: update_context_display("Cerebras", model),
|
2699 |
-
inputs=cerebras_model,
|
2700 |
outputs=context_display
|
2701 |
).then(
|
2702 |
-
fn=lambda model: update_model_info("
|
2703 |
-
inputs=
|
2704 |
outputs=model_info_display
|
2705 |
).then(
|
2706 |
-
fn=lambda model: update_vision_indicator("
|
2707 |
-
inputs=
|
2708 |
outputs=is_vision_indicator
|
2709 |
).then(
|
2710 |
-
fn=lambda model: update_image_upload_visibility("
|
2711 |
-
inputs=
|
2712 |
outputs=image_upload_container
|
2713 |
)
|
2714 |
|
@@ -2744,25 +2858,24 @@ def create_app():
|
|
2744 |
return search_cohere_models(search_term)
|
2745 |
elif provider == "Together":
|
2746 |
return search_together_models(search_term)
|
2747 |
-
elif provider == "
|
2748 |
-
return
|
2749 |
-
elif provider == "Cerebras":
|
2750 |
-
return search_cerebras_models(search_term)
|
2751 |
elif provider == "GoogleAI":
|
2752 |
return search_googleai_models(search_term)
|
2753 |
return None
|
2754 |
|
2755 |
# Set up submission event
|
2756 |
def submit_message(message, history, provider, openrouter_model, openai_model, hf_model, groq_model, cohere_model,
|
2757 |
-
|
2758 |
-
|
2759 |
-
|
2760 |
-
|
2761 |
-
|
|
|
2762 |
"""Submit message to selected provider and model"""
|
2763 |
# Get the currently selected model
|
2764 |
model_choice = get_current_model(provider, openrouter_model, openai_model, hf_model, groq_model, cohere_model,
|
2765 |
-
|
2766 |
|
2767 |
# Check if model is selected
|
2768 |
if not model_choice:
|
@@ -2786,6 +2899,8 @@ def create_app():
|
|
2786 |
api_key_override = cohere_api_key
|
2787 |
elif provider == "Together" and together_api_key:
|
2788 |
api_key_override = together_api_key
|
|
|
|
|
2789 |
elif provider == "GoogleAI" and googleai_api_key:
|
2790 |
api_key_override = googleai_api_key
|
2791 |
|
@@ -2820,7 +2935,7 @@ def create_app():
|
|
2820 |
fn=submit_message,
|
2821 |
inputs=[
|
2822 |
message, chatbot, provider_choice,
|
2823 |
-
openrouter_model, openai_model, hf_model, groq_model, cohere_model, together_model,
|
2824 |
temperature, max_tokens, top_p, frequency_penalty, presence_penalty, repetition_penalty,
|
2825 |
top_k, min_p, seed, top_a, stream_output, response_format,
|
2826 |
images, documents, reasoning_effort, system_message, transforms,
|
@@ -2839,7 +2954,7 @@ def create_app():
|
|
2839 |
fn=submit_message,
|
2840 |
inputs=[
|
2841 |
message, chatbot, provider_choice,
|
2842 |
-
openrouter_model, openai_model, hf_model, groq_model, cohere_model, together_model,
|
2843 |
temperature, max_tokens, top_p, frequency_penalty, presence_penalty, repetition_penalty,
|
2844 |
top_k, min_p, seed, top_a, stream_output, response_format,
|
2845 |
images, documents, reasoning_effort, system_message, transforms,
|
@@ -2875,6 +2990,10 @@ if __name__ == "__main__":
|
|
2875 |
if not OPENROUTER_API_KEY:
|
2876 |
logger.warning("WARNING: OPENROUTER_API_KEY environment variable is not set")
|
2877 |
missing_keys.append("OpenRouter")
|
|
|
|
|
|
|
|
|
2878 |
|
2879 |
if not OPENAI_API_KEY:
|
2880 |
logger.warning("WARNING: OPENAI_API_KEY environment variable is not set")
|
|
|
74 |
HF_API_KEY = os.environ.get("HF_API_KEY", "")
|
75 |
TOGETHER_API_KEY = os.environ.get("TOGETHER_API_KEY", "")
|
76 |
GOOGLEAI_API_KEY = os.environ.get("GOOGLEAI_API_KEY", "")
|
77 |
+
ANTHROPIC_API_KEY = os.environ.get("ANTHROPIC_API_KEY", "")
|
78 |
|
79 |
# Print application startup message with timestamp
|
80 |
current_time = time.strftime("%Y-%m-%d %H:%M:%S")
|
|
|
354 |
"gemini-2.5-pro": 2000000,
|
355 |
}
|
356 |
|
357 |
+
# ANTHROPIC MODELS
# Maps Anthropic model ID -> context window size in tokens. IDs must match
# the Anthropic Messages API exactly or requests will fail with a 404.
ANTHROPIC_MODELS = {
    "claude-3-7-sonnet-20250219": 200000,  # Claude 3.7 Sonnet (200K context window)
    "claude-3-5-sonnet-20241022": 200000,  # Claude 3.5 Sonnet
    "claude-3-5-haiku-20241022": 200000,   # Claude 3.5 Haiku (fixed ID: released 2024-10-22, not 2024-03-07)
    "claude-3-opus-20240229": 200000,      # Claude 3 Opus
    "claude-3-haiku-20240307": 200000,     # Claude 3 Haiku
    "claude-3-sonnet-20240229": 200000,    # Claude 3 Sonnet
}

# Add Anthropic to the vision models list.
# NOTE: Claude 3.5 Haiku is text-only per Anthropic's model docs, so it is
# deliberately excluded here; all other Claude 3.x models accept images.
VISION_MODELS["Anthropic"] = [
    "claude-3-7-sonnet-20250219",
    "claude-3-5-sonnet-20241022",
    "claude-3-opus-20240229",
    "claude-3-sonnet-20240229",
    "claude-3-haiku-20240307",
]
]
|
376 |
|
377 |
# Add all models with "vl", "vision", "visual" in their name to HF vision models
|
378 |
for model_name in list(HUGGINGFACE_MODELS.keys()):
|
|
|
701 |
# API HANDLERS
|
702 |
# ==========================================================
|
703 |
|
704 |
+
def call_anthropic_api(payload, api_key_override=None):
    """Make a call to the Anthropic Messages API with error handling.

    Args:
        payload: dict carrying "model", OpenAI-style "messages",
            "temperature" and "max_tokens". A message with role "system"
            is lifted into the API's top-level system prompt; list-valued
            message content is converted part-by-part (text parts and
            base64 data-URL images) into Anthropic content blocks.
        api_key_override: optional API key used in place of the
            ANTHROPIC_API_KEY environment variable.

    Returns:
        The Anthropic SDK Message response object.

    Raises:
        ImportError: the anthropic package is not installed.
        ValueError: no API key is available.
        Exception: any SDK/API error, logged and re-raised.
    """
    try:
        # Import lazily so the app still starts when the optional
        # dependency is missing.
        try:
            from anthropic import Anthropic
        except ImportError:
            raise ImportError("Anthropic package not installed. Install it with: pip install anthropic")

        api_key = api_key_override if api_key_override else os.environ.get("ANTHROPIC_API_KEY", "")
        if not api_key:
            raise ValueError("Anthropic API key is required")

        client = Anthropic(api_key=api_key)

        # Extract parameters from payload (defaults mirror the UI's).
        model = payload.get("model", "claude-3-5-sonnet-20241022")
        messages = payload.get("messages", [])
        temperature = payload.get("temperature", 0.7)
        max_tokens = payload.get("max_tokens", 1000)

        # Anthropic takes the system prompt as a top-level argument,
        # not as a chat message, so split it out here.
        system_prompt = None
        chat_messages = []

        for msg in messages:
            if msg["role"] == "system":
                system_prompt = msg["content"]
            elif isinstance(msg["content"], list):
                # Multimodal content: translate each OpenAI-style part
                # into the equivalent Anthropic content block.
                anthropic_content = []
                for item in msg["content"]:
                    if item["type"] == "text":
                        anthropic_content.append({
                            "type": "text",
                            "text": item["text"]
                        })
                    elif item["type"] == "image_url":
                        image_url = item["image_url"]["url"]
                        if image_url.startswith("data:"):
                            # data URL -> base64 image block
                            # ("data:<media_type>;base64,<data>")
                            header, base64_data = image_url.split(",", 1)
                            media_type = header.split(":")[1].split(";")[0]
                            anthropic_content.append({
                                "type": "image",
                                "source": {
                                    "type": "base64",
                                    "media_type": media_type,
                                    "data": base64_data
                                }
                            })
                        else:
                            # Plain remote URLs are not forwarded; surface
                            # them to the model as text instead.
                            anthropic_content.append({
                                "type": "text",
                                "text": f"[Image URL: {image_url}]"
                            })
                chat_messages.append({
                    "role": msg["role"],
                    "content": anthropic_content
                })
            else:
                # Simple text content passes through unchanged.
                chat_messages.append({
                    "role": msg["role"],
                    "content": msg["content"]
                })

        # Build the request; only include `system` when one was provided —
        # the Messages API accepts a string/blocks there, not null.
        request_kwargs = {
            "model": model,
            "max_tokens": max_tokens,
            "temperature": temperature,
            "messages": chat_messages
        }
        if system_prompt is not None:
            request_kwargs["system"] = system_prompt

        response = client.messages.create(**request_kwargs)
        return response
    except Exception as e:
        logger.error(f"Anthropic API error: {str(e)}")
        raise e
|
792 |
+
|
793 |
def call_openrouter_api(payload, api_key_override=None):
|
794 |
"""Make a call to OpenRouter API with error handling"""
|
795 |
try:
|
|
|
1003 |
elif provider == "OpenAI":
|
1004 |
if hasattr(result, "choices") and len(result.choices) > 0:
|
1005 |
return result.choices[0].message.content
|
1006 |
+
|
1007 |
+
elif provider == "Anthropic":
|
1008 |
+
if hasattr(result, "content"):
|
1009 |
+
# Combine text from all content blocks
|
1010 |
+
full_text = ""
|
1011 |
+
for block in result.content:
|
1012 |
+
if block.type == "text":
|
1013 |
+
full_text += block.text
|
1014 |
+
return full_text
|
1015 |
+
return "No content returned from Anthropic"
|
1016 |
|
1017 |
elif provider == "HuggingFace":
|
1018 |
return result.get("generated_text", "")
|
|
|
1581 |
{"role": "user", "content": message},
|
1582 |
{"role": "assistant", "content": error_message}
|
1583 |
]
|
1584 |
+
|
1585 |
+
elif provider == "Anthropic":
|
1586 |
+
# Get model ID from registry
|
1587 |
+
model_id, _ = get_model_info(provider, model_choice)
|
1588 |
+
if not model_id:
|
1589 |
+
error_message = f"Error: Model '{model_choice}' not found in Anthropic"
|
1590 |
+
return history + [
|
1591 |
+
{"role": "user", "content": message},
|
1592 |
+
{"role": "assistant", "content": error_message}
|
1593 |
+
]
|
1594 |
+
|
1595 |
+
# Build Anthropic payload
|
1596 |
+
payload = {
|
1597 |
+
"model": model_id,
|
1598 |
+
"messages": messages,
|
1599 |
+
"temperature": temperature,
|
1600 |
+
"max_tokens": max_tokens
|
1601 |
+
}
|
1602 |
+
|
1603 |
+
# Call Anthropic API
|
1604 |
+
logger.info(f"Sending request to Anthropic model: {model_id}")
|
1605 |
+
|
1606 |
+
try:
|
1607 |
+
response = call_anthropic_api(payload, api_key_override)
|
1608 |
+
|
1609 |
+
# Extract response
|
1610 |
+
ai_response = extract_ai_response(response, provider)
|
1611 |
+
return history + [
|
1612 |
+
{"role": "user", "content": message},
|
1613 |
+
{"role": "assistant", "content": ai_response}
|
1614 |
+
]
|
1615 |
+
except Exception as e:
|
1616 |
+
error_message = f"Anthropic API Error: {str(e)}"
|
1617 |
+
logger.error(error_message)
|
1618 |
+
return history + [
|
1619 |
+
{"role": "user", "content": message},
|
1620 |
+
{"role": "assistant", "content": error_message}
|
1621 |
+
]
|
1622 |
|
1623 |
elif provider == "OpenAI":
|
1624 |
# Process OpenAI similarly as above...
|
|
|
2043 |
gr.Markdown("""
|
2044 |
# 🤖 CrispChat
|
2045 |
|
2046 |
+
Chat with AI models from multiple providers: OpenRouter, OpenAI, HuggingFace, Groq, Cohere, Together, Anthropic, and Google AI.
|
2047 |
""")
|
2048 |
|
2049 |
with gr.Row():
|
|
|
2103 |
with gr.Group(elem_classes="provider-selection"):
|
2104 |
gr.Markdown("### Provider Selection")
|
2105 |
|
2106 |
+
# Provider selection - removed OVH and Cerebras, added Anthropic
|
2107 |
provider_choice = gr.Radio(
|
2108 |
+
choices=["OpenRouter", "OpenAI", "HuggingFace", "Groq", "Cohere", "Together", "Anthropic", "GoogleAI"],
|
2109 |
value="OpenRouter",
|
2110 |
label="AI Provider"
|
2111 |
)
|
|
|
2156 |
value=TOGETHER_API_KEY if TOGETHER_API_KEY else ""
|
2157 |
)
|
2158 |
|
2159 |
+
# Add Anthropic API key
|
2160 |
+
anthropic_api_key = gr.Textbox(
|
2161 |
+
placeholder="Enter Anthropic API key",
|
2162 |
+
label="Anthropic API Key",
|
2163 |
+
type="password",
|
2164 |
+
value=os.environ.get("ANTHROPIC_API_KEY", "")
|
2165 |
+
)
|
2166 |
+
|
2167 |
googleai_api_key = gr.Textbox(
|
2168 |
placeholder="Enter Google AI API key",
|
2169 |
label="Google AI API Key",
|
|
|
2224 |
|
2225 |
together_model = gr.Dropdown(
|
2226 |
choices=list(TOGETHER_MODELS.keys()),
|
2227 |
+
value="meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo" if "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo" in TOGETHER_MODELS else None,
|
2228 |
label="Together Model",
|
2229 |
elem_id="together-model-choice",
|
2230 |
visible=False
|
2231 |
)
|
2232 |
|
2233 |
+
# Add Anthropic model dropdown
|
2234 |
+
anthropic_model = gr.Dropdown(
|
2235 |
+
choices=list(ANTHROPIC_MODELS.keys()),
|
2236 |
+
value="claude-3-5-sonnet-20241022" if "claude-3-5-sonnet-20241022" in ANTHROPIC_MODELS else None,
|
2237 |
+
label="Anthropic Model",
|
2238 |
+
elem_id="anthropic-model-choice",
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
2239 |
visible=False
|
2240 |
)
|
2241 |
|
|
|
2411 |
- **Groq**: High-performance inference, requires API key
|
2412 |
- **Cohere**: Specialized in language understanding, requires API key
|
2413 |
- **Together**: Access to high-quality open models, requires API key
|
2414 |
+
- **Anthropic**: Claude models with strong reasoning capabilities, requires API key
|
|
|
2415 |
- **GoogleAI**: Google's Gemini models, requires API key
|
2416 |
|
2417 |
## Advanced Parameters
|
|
|
2424 |
# Add a footer with version info
|
2425 |
footer_md = gr.Markdown("""
|
2426 |
---
|
2427 |
+
### CrispChat v1.2
|
2428 |
Built with ❤️ using Gradio and multiple AI provider APIs | Context sizes shown next to model names
|
2429 |
""")
|
2430 |
|
|
|
2438 |
groq_model: gr.update(visible=(provider == "Groq")),
|
2439 |
cohere_model: gr.update(visible=(provider == "Cohere")),
|
2440 |
together_model: gr.update(visible=(provider == "Together")),
|
2441 |
+
anthropic_model: gr.update(visible=(provider == "Anthropic")),
|
|
|
2442 |
googleai_model: gr.update(visible=(provider == "GoogleAI"))
|
2443 |
}
|
2444 |
|
2445 |
+
def update_context_for_provider(provider, openrouter_model, openai_model, hf_model, groq_model, cohere_model, together_model, anthropic_model, googleai_model):
|
2446 |
"""Update context display based on selected provider and model"""
|
2447 |
if provider == "OpenRouter":
|
2448 |
return update_context_display(provider, openrouter_model)
|
|
|
2456 |
return update_context_display(provider, cohere_model)
|
2457 |
elif provider == "Together":
|
2458 |
return update_context_display(provider, together_model)
|
2459 |
+
elif provider == "Anthropic":
|
2460 |
+
return update_context_display(provider, anthropic_model)
|
|
|
|
|
2461 |
elif provider == "GoogleAI":
|
2462 |
return update_context_display(provider, googleai_model)
|
2463 |
return "Unknown"
|
2464 |
|
2465 |
+
def update_model_info_for_provider(provider, openrouter_model, openai_model, hf_model, groq_model, cohere_model, together_model, anthropic_model, googleai_model):
|
2466 |
"""Update model info based on selected provider and model"""
|
2467 |
if provider == "OpenRouter":
|
2468 |
return update_model_info(provider, openrouter_model)
|
|
|
2476 |
return update_model_info(provider, cohere_model)
|
2477 |
elif provider == "Together":
|
2478 |
return update_model_info(provider, together_model)
|
2479 |
+
elif provider == "Anthropic":
|
2480 |
+
return update_model_info(provider, anthropic_model)
|
|
|
|
|
2481 |
elif provider == "GoogleAI":
|
2482 |
return update_model_info(provider, googleai_model)
|
2483 |
return "<p>Model information not available</p>"
|
|
|
2584 |
default_model = "meta-llama/Llama-3.1-8B-Instruct" if "meta-llama/Llama-3.1-8B-Instruct" in all_models else all_models[0] if all_models else None
|
2585 |
return gr.update(choices=all_models, value=default_model)
|
2586 |
|
2587 |
+
def search_anthropic_models(search_term):
    """Filter the Anthropic model dropdown by a case-insensitive substring.

    Returns a gr.update(...) carrying the matching choices. When the
    search term is empty or matches nothing, falls back to the full list,
    preferring claude-3-5-sonnet-20241022 as the selected value.
    """
    all_models = list(ANTHROPIC_MODELS.keys())
    # Compute the fallback selection once instead of duplicating the
    # expression in both no-term and no-match branches.
    if "claude-3-5-sonnet-20241022" in all_models:
        default_model = "claude-3-5-sonnet-20241022"
    else:
        default_model = all_models[0] if all_models else None

    if not search_term:
        return gr.update(choices=all_models, value=default_model)

    filtered_models = [model for model in all_models if search_term.lower() in model.lower()]

    if filtered_models:
        return gr.update(choices=filtered_models, value=filtered_models[0])
    return gr.update(choices=all_models, value=default_model)
|
|
|
2599 |
|
2600 |
def search_googleai_models(search_term):
|
2601 |
"""Filter GoogleAI models based on search term"""
|
|
|
2613 |
return gr.update(choices=all_models, value=default_model)
|
2614 |
|
2615 |
|
2616 |
+
def get_current_model(provider, openrouter_model, openai_model, hf_model, groq_model, cohere_model,
                      together_model, anthropic_model, googleai_model):
    """Get the currently selected model based on provider"""
    # Map each provider name to the value of its dropdown; a dict lookup
    # replaces the long if/elif chain with identical results.
    provider_selections = {
        "OpenRouter": openrouter_model,
        "OpenAI": openai_model,
        "HuggingFace": hf_model,
        "Groq": groq_model,
        "Cohere": cohere_model,
        "Together": together_model,
        "Anthropic": anthropic_model,
        "GoogleAI": googleai_model,
    }
    # Unknown providers yield None, matching the original fall-through.
    return provider_selections.get(provider)
|
2636 |
|
2637 |
|
|
|
2653 |
groq_model,
|
2654 |
cohere_model,
|
2655 |
together_model,
|
2656 |
+
anthropic_model,
|
|
|
2657 |
googleai_model
|
2658 |
]
|
2659 |
).then(
|
2660 |
fn=update_context_for_provider,
|
2661 |
+
inputs=[provider_choice, openrouter_model, openai_model, hf_model, groq_model, cohere_model, together_model, anthropic_model, googleai_model],
|
2662 |
outputs=context_display
|
2663 |
).then(
|
2664 |
fn=update_model_info_for_provider,
|
2665 |
+
inputs=[provider_choice, openrouter_model, openai_model, hf_model, groq_model, cohere_model, together_model, anthropic_model, googleai_model],
|
2666 |
outputs=model_info_display
|
2667 |
).then(
|
2668 |
fn=lambda provider, model: update_vision_indicator(
|
2669 |
provider,
|
2670 |
+
get_current_model(provider, model, None, None, None, None, None, None, None)
|
2671 |
),
|
2672 |
inputs=[provider_choice, openrouter_model],
|
2673 |
outputs=is_vision_indicator
|
2674 |
).then(
|
2675 |
fn=lambda provider, model: update_image_upload_visibility(
|
2676 |
provider,
|
2677 |
+
get_current_model(provider, model, None, None, None, None, None, None, None)
|
2678 |
),
|
2679 |
inputs=[provider_choice, openrouter_model],
|
2680 |
outputs=image_upload_container
|
|
|
2689 |
search_groq_models(search) if provider == "Groq" else gr.update(),
|
2690 |
search_cohere_models(search) if provider == "Cohere" else gr.update(),
|
2691 |
search_together_models(search) if provider == "Together" else gr.update(),
|
2692 |
+
search_anthropic_models(search) if provider == "Anthropic" else gr.update(),
|
|
|
2693 |
search_googleai_models(search) if provider == "GoogleAI" else gr.update()
|
2694 |
],
|
2695 |
inputs=[provider_choice, model_search],
|
2696 |
outputs=[
|
2697 |
openrouter_model, openai_model, hf_model, groq_model,
|
2698 |
+
cohere_model, together_model, anthropic_model, googleai_model
|
2699 |
]
|
2700 |
)
|
2701 |
|
|
|
2808 |
outputs=image_upload_container
|
2809 |
)
|
2810 |
|
2811 |
+
anthropic_model.change(
|
2812 |
+
fn=lambda model: update_context_display("Anthropic", model),
|
2813 |
+
inputs=anthropic_model,
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
2814 |
outputs=context_display
|
2815 |
).then(
|
2816 |
+
fn=lambda model: update_model_info("Anthropic", model),
|
2817 |
+
inputs=anthropic_model,
|
2818 |
outputs=model_info_display
|
2819 |
).then(
|
2820 |
+
fn=lambda model: update_vision_indicator("Anthropic", model),
|
2821 |
+
inputs=anthropic_model,
|
2822 |
outputs=is_vision_indicator
|
2823 |
).then(
|
2824 |
+
fn=lambda model: update_image_upload_visibility("Anthropic", model),
|
2825 |
+
inputs=anthropic_model,
|
2826 |
outputs=image_upload_container
|
2827 |
)
|
2828 |
|
|
|
2858 |
return search_cohere_models(search_term)
|
2859 |
elif provider == "Together":
|
2860 |
return search_together_models(search_term)
|
2861 |
+
elif provider == "Anthropic":
|
2862 |
+
return search_anthropic_models(search_term)
|
|
|
|
|
2863 |
elif provider == "GoogleAI":
|
2864 |
return search_googleai_models(search_term)
|
2865 |
return None
|
2866 |
|
2867 |
# Set up submission event
|
2868 |
def submit_message(message, history, provider, openrouter_model, openai_model, hf_model, groq_model, cohere_model,
|
2869 |
+
together_model, anthropic_model, googleai_model,
|
2870 |
+
temperature, max_tokens, top_p, frequency_penalty, presence_penalty, repetition_penalty,
|
2871 |
+
top_k, min_p, seed, top_a, stream_output, response_format,
|
2872 |
+
images, documents, reasoning_effort, system_message, transforms,
|
2873 |
+
openrouter_api_key, openai_api_key, hf_api_key, groq_api_key, cohere_api_key, together_api_key, anthropic_api_key, googleai_api_key):
|
2874 |
+
|
2875 |
"""Submit message to selected provider and model"""
|
2876 |
# Get the currently selected model
|
2877 |
model_choice = get_current_model(provider, openrouter_model, openai_model, hf_model, groq_model, cohere_model,
|
2878 |
+
together_model, anthropic_model, googleai_model)
|
2879 |
|
2880 |
# Check if model is selected
|
2881 |
if not model_choice:
|
|
|
2899 |
api_key_override = cohere_api_key
|
2900 |
elif provider == "Together" and together_api_key:
|
2901 |
api_key_override = together_api_key
|
2902 |
+
elif provider == "Anthropic" and anthropic_api_key:
|
2903 |
+
api_key_override = anthropic_api_key
|
2904 |
elif provider == "GoogleAI" and googleai_api_key:
|
2905 |
api_key_override = googleai_api_key
|
2906 |
|
|
|
2935 |
fn=submit_message,
|
2936 |
inputs=[
|
2937 |
message, chatbot, provider_choice,
|
2938 |
+
openrouter_model, openai_model, hf_model, groq_model, cohere_model, together_model, anthropic_model, googleai_model,
|
2939 |
temperature, max_tokens, top_p, frequency_penalty, presence_penalty, repetition_penalty,
|
2940 |
top_k, min_p, seed, top_a, stream_output, response_format,
|
2941 |
images, documents, reasoning_effort, system_message, transforms,
|
|
|
2954 |
fn=submit_message,
|
2955 |
inputs=[
|
2956 |
message, chatbot, provider_choice,
|
2957 |
+
openrouter_model, openai_model, hf_model, groq_model, cohere_model, together_model, anthropic_model, googleai_model,
|
2958 |
temperature, max_tokens, top_p, frequency_penalty, presence_penalty, repetition_penalty,
|
2959 |
top_k, min_p, seed, top_a, stream_output, response_format,
|
2960 |
images, documents, reasoning_effort, system_message, transforms,
|
|
|
2990 |
if not OPENROUTER_API_KEY:
|
2991 |
logger.warning("WARNING: OPENROUTER_API_KEY environment variable is not set")
|
2992 |
missing_keys.append("OpenRouter")
|
2993 |
+
|
2994 |
+
if not ANTHROPIC_API_KEY:
|
2995 |
+
logger.warning("WARNING: ANTHROPIC_API_KEY environment variable is not set")
|
2996 |
+
missing_keys.append("Anthropic")
|
2997 |
|
2998 |
if not OPENAI_API_KEY:
|
2999 |
logger.warning("WARNING: OPENAI_API_KEY environment variable is not set")
|