cstr committed (verified)
Commit 7717985 · Parent(s): 499fdba

Update app.py

Files changed (1):
  1. app.py (+192, -13)
app.py CHANGED
@@ -75,6 +75,7 @@ HF_API_KEY = os.environ.get("HF_API_KEY", "")
 TOGETHER_API_KEY = os.environ.get("TOGETHER_API_KEY", "")
 GOOGLEAI_API_KEY = os.environ.get("GOOGLEAI_API_KEY", "")
 ANTHROPIC_API_KEY = os.environ.get("ANTHROPIC_API_KEY", "")

 # Print application startup message with timestamp
 current_time = time.strftime("%Y-%m-%d %H:%M:%S")
@@ -218,6 +219,48 @@ VISION_MODELS = {
 "GoogleAI": ["gemini-1.5-pro", "gemini-1.0-pro", "gemini-1.5-flash", "gemini-2.0-pro", "gemini-2.5-pro"]
 }

 # OPENAI MODELS
 OPENAI_MODELS = {
 "gpt-3.5-turbo": 16385,
@@ -618,6 +661,9 @@ def get_model_info(provider, model_choice):
 for name, model_id, ctx_size in OPENROUTER_ALL_MODELS:
 if name == model_choice:
 return model_id, ctx_size
 elif provider == "OpenAI":
 if model_choice in OPENAI_MODELS:
 return model_choice, OPENAI_MODELS[model_choice]
@@ -787,7 +833,73 @@ def call_anthropic_api(payload, api_key_override=None):
 except Exception as e:
 logger.error(f"Anthropic API error: {str(e)}")
 raise e
-
 def call_openrouter_api(payload, api_key_override=None):
 """Make a call to OpenRouter API with error handling"""
 try:
@@ -1030,7 +1142,12 @@ def extract_ai_response(result, provider):
 return text_content
 else:
 return "No response content from Cohere"
-
 elif provider == "Together":
 # Handle response from Together's native client
 if hasattr(result, "choices") and len(result.choices) > 0:
@@ -1580,6 +1697,43 @@ def ask_ai(message, history, provider, model_choice, temperature, max_tokens, to
 {"role": "assistant", "content": error_message}
 ]

 elif provider == "Anthropic":
 # Get model ID from registry
 model_id, _ = get_model_info(provider, model_choice)
@@ -2101,9 +2255,9 @@ def create_app():
 with gr.Group(elem_classes="provider-selection"):
 gr.Markdown("### Provider Selection")

- # Provider selection - removed OVH and Cerebras, added Anthropic
 provider_choice = gr.Radio(
- choices=["OpenRouter", "OpenAI", "HuggingFace", "Groq", "Cohere", "Together", "Anthropic", "GoogleAI"],
 value="OpenRouter",
 label="AI Provider"
 )
@@ -2118,6 +2272,13 @@ def create_app():
 type="password",
 value=OPENROUTER_API_KEY if OPENROUTER_API_KEY else ""
 )

 openai_api_key = gr.Textbox(
 placeholder="Enter OpenAI API key",
@@ -2187,6 +2348,15 @@ def create_app():
 elem_id="openrouter-model-choice",
 visible=True
 )

 openai_model = gr.Dropdown(
 choices=list(OPENAI_MODELS.keys()),
@@ -2437,6 +2607,7 @@ def create_app():
 cohere_model: gr.update(visible=(provider == "Cohere")),
 together_model: gr.update(visible=(provider == "Together")),
 anthropic_model: gr.update(visible=(provider == "Anthropic")),
 googleai_model: gr.update(visible=(provider == "GoogleAI"))
 }
@@ -2612,7 +2783,7 @@ def create_app():

 def get_current_model(provider, openrouter_model, openai_model, hf_model, groq_model, cohere_model,
- together_model, anthropic_model, googleai_model):
 """Get the currently selected model based on provider"""
 if provider == "OpenRouter":
 return openrouter_model
@@ -2628,11 +2799,12 @@ def create_app():
 return together_model
 elif provider == "Anthropic":
 return anthropic_model
 elif provider == "GoogleAI":
 return googleai_model
 return None

-
 # Process uploaded images
 image_upload_btn.upload(
 fn=lambda files: files,
@@ -2864,16 +3036,16 @@ def create_app():

 # Set up submission event
 def submit_message(message, history, provider,
- openrouter_model, openai_model, hf_model, groq_model, cohere_model, together_model, anthropic_model, googleai_model,
 temperature, max_tokens, top_p, frequency_penalty, presence_penalty, repetition_penalty,
 top_k, min_p, seed, top_a, stream_output, response_format,
 images, documents, reasoning_effort, system_message, transforms,
- openrouter_api_key, openai_api_key, hf_api_key, groq_api_key, cohere_api_key, together_api_key, anthropic_api_key, googleai_api_key):

 """Submit message to selected provider and model"""
 # Get the currently selected model
 model_choice = get_current_model(provider, openrouter_model, openai_model, hf_model, groq_model, cohere_model,
- together_model, anthropic_model, googleai_model)

 # Check if model is selected
 if not model_choice:
@@ -2899,6 +3071,8 @@ def create_app():
 api_key_override = together_api_key
 elif provider == "Anthropic" and anthropic_api_key:
 api_key_override = anthropic_api_key
 elif provider == "GoogleAI" and googleai_api_key:
 api_key_override = googleai_api_key
@@ -2933,11 +3107,11 @@ def create_app():
 fn=submit_message,
 inputs=[
 message, chatbot, provider_choice,
- openrouter_model, openai_model, hf_model, groq_model, cohere_model, together_model, anthropic_model, googleai_model,
 temperature, max_tokens, top_p, frequency_penalty, presence_penalty, repetition_penalty,
 top_k, min_p, seed, top_a, stream_output, response_format,
 images, documents, reasoning_effort, system_message, transforms,
- openrouter_api_key, openai_api_key, hf_api_key, groq_api_key, cohere_api_key, together_api_key, anthropic_api_key, googleai_api_key
 ],
 outputs=chatbot,
 show_progress="minimal",
@@ -2952,11 +3126,11 @@ def create_app():
 fn=submit_message,
 inputs=[
 message, chatbot, provider_choice,
- openrouter_model, openai_model, hf_model, groq_model, cohere_model, together_model, anthropic_model, googleai_model,
 temperature, max_tokens, top_p, frequency_penalty, presence_penalty, repetition_penalty,
 top_k, min_p, seed, top_a, stream_output, response_format,
 images, documents, reasoning_effort, system_message, transforms,
- openrouter_api_key, openai_api_key, hf_api_key, groq_api_key, cohere_api_key, together_api_key, anthropic_api_key, googleai_api_key
 ],
 outputs=chatbot,
 show_progress="minimal",
@@ -2989,6 +3163,11 @@ if __name__ == "__main__":
 logger.warning("WARNING: OPENROUTER_API_KEY environment variable is not set")
 missing_keys.append("OpenRouter")

 if not ANTHROPIC_API_KEY:
 logger.warning("WARNING: ANTHROPIC_API_KEY environment variable is not set")
 missing_keys.append("Anthropic")
 
75
  TOGETHER_API_KEY = os.environ.get("TOGETHER_API_KEY", "")
76
  GOOGLEAI_API_KEY = os.environ.get("GOOGLEAI_API_KEY", "")
77
  ANTHROPIC_API_KEY = os.environ.get("ANTHROPIC_API_KEY", "")
78
+ POE_API_KEY = os.environ.get("POE_API_KEY", "")
79
 
80
  # Print application startup message with timestamp
81
  current_time = time.strftime("%Y-%m-%d %H:%M:%S")
 
219
  "GoogleAI": ["gemini-1.5-pro", "gemini-1.0-pro", "gemini-1.5-flash", "gemini-2.0-pro", "gemini-2.5-pro"]
220
  }
221
 
222
+ # POE MODELS
223
+ POE_MODELS = {
224
+ "claude_3_igloo": 4000, # Claude-3.5-Sonnet
225
+ "claude_2_1_cedar": 4000, # Claude-3-Opus
226
+ "claude_2_1_bamboo": 4000, # Claude-3-Sonnet
227
+ "claude_3_haiku": 4000, # Claude-3-Haiku
228
+ "claude_3_igloo_200k": 200000, # Claude-3.5-Sonnet-200k
229
+ "claude_3_opus_200k": 200000, # Claude-3-Opus-200k
230
+ "claude_3_sonnet_200k": 200000, # Claude-3-Sonnet-200k
231
+ "claude_3_haiku_200k": 200000, # Claude-3-Haiku-200k
232
+ "claude_2_short": 4000, # Claude-2
233
+ "a2_2": 100000, # Claude-2-100k
234
+ "a2": 9000, # Claude-instant
235
+ "a2_100k": 100000, # Claude-instant-100k
236
+ "chinchilla": 4000, # GPT-3.5-Turbo
237
+ "gpt3_5": 2000, # GPT-3.5-Turbo-Raw
238
+ "chinchilla_instruct": 2000, # GPT-3.5-Turbo-Instruct
239
+ "agouti": 16000, # ChatGPT-16k
240
+ "gpt4_classic": 2000, # GPT-4-Classic
241
+ "beaver": 4000, # GPT-4-Turbo
242
+ "vizcacha": 128000, # GPT-4-Turbo-128k
243
+ "gpt4_o": 4000, # GPT-4o
244
+ "gpt4_o_128k": 128000, # GPT-4o-128k
245
+ "gpt4_o_mini": 4000, # GPT-4o-Mini
246
+ "gpt4_o_mini_128k": 128000, # GPT-4o-Mini-128k
247
+ "acouchy": 8000, # Google-PaLM
248
+ "code_llama_13b_instruct": 4000, # Code-Llama-13b
249
+ "code_llama_34b_instruct": 4000, # Code-Llama-34b
250
+ "upstage_solar_0_70b_16bit": 2000, # Solar-Mini
251
+ "gemini_pro_search": 4000, # Gemini-1.5-Flash-Search
252
+ "gemini_1_5_pro_1m": 2000000, # Gemini-1.5-Pro-2M
253
+ }
254
+
255
+ # Vision-capable Poe bots (registered in VISION_MODELS below)
256
+ POE_VISION_MODELS = [
257
+ "claude_3_igloo", "claude_2_1_cedar", "claude_2_1_bamboo", "claude_3_haiku",
258
+ "claude_3_igloo_200k", "claude_3_opus_200k", "claude_3_sonnet_200k", "claude_3_haiku_200k",
259
+ "gpt4_o", "gpt4_o_128k", "gpt4_o_mini", "gpt4_o_mini_128k", "beaver", "vizcacha"
260
+ ]
261
+
262
+ VISION_MODELS["Poe"] = POE_VISION_MODELS
263
+
264
  # OPENAI MODELS
265
  OPENAI_MODELS = {
266
  "gpt-3.5-turbo": 16385,
 
661
  for name, model_id, ctx_size in OPENROUTER_ALL_MODELS:
662
  if name == model_choice:
663
  return model_id, ctx_size
664
+ elif provider == "Poe":
665
+ if model_choice in POE_MODELS:
666
+ return model_choice, POE_MODELS[model_choice]
667
  elif provider == "OpenAI":
668
  if model_choice in OPENAI_MODELS:
669
  return model_choice, OPENAI_MODELS[model_choice]
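
With this branch in place, Poe bots resolve through the same lookup path as the other providers; a minimal usage sketch, with values taken from the POE_MODELS entries above:

```python
model_id, ctx_size = get_model_info("Poe", "chinchilla")
# model_id == "chinchilla", ctx_size == 4000

model_id, ctx_size = get_model_info("Poe", "claude_3_igloo_200k")
# model_id == "claude_3_igloo_200k", ctx_size == 200000
```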
 
833
  except Exception as e:
834
  logger.error(f"Anthropic API error: {str(e)}")
835
  raise e
836
+
837
+ def call_poe_api(payload, api_key_override=None):
838
+ """Make a call to Poe API with error handling"""
839
+ try:
840
+ # Try to import fastapi_poe
841
+ try:
842
+ import fastapi_poe as fp
843
+ except ImportError:
844
+ raise ImportError("fastapi_poe package not installed. Install it with: pip install fastapi_poe")
845
+
846
+ api_key = api_key_override if api_key_override else os.environ.get("POE_API_KEY", "")
847
+ if not api_key:
848
+ raise ValueError("Poe API key is required")
849
+
850
+ # Extract parameters from payload
851
+ model = payload.get("model", "chinchilla") # Default to GPT-3.5-Turbo
852
+ messages = payload.get("messages", [])
853
+
854
+ # Convert messages to Poe format
855
+ poe_messages = []
856
+ for msg in messages:
857
+ role = msg["role"]
858
+ content = msg["content"]
859
+
860
+ # Skip system messages as Poe doesn't support them directly
861
+ if role == "system":
862
+ continue
863
+
864
+ # Convert content format
865
+ if isinstance(content, list):
866
+ # Handle multimodal content (images)
867
+ text_parts = []
868
+ for item in content:
869
+ if item["type"] == "text":
870
+ text_parts.append(item["text"])
871
+
872
+ # Images are dropped here; only the text parts are forwarded to the bot.
873
+ # Proper vision support would require handling the image attachments separately.
874
+ content = "\n".join(text_parts)
875
+
876
+ # Add message to Poe messages
877
+ # fastapi_poe names the assistant role "bot", so map it before sending
+ poe_messages.append(fp.ProtocolMessage(role="bot" if role == "assistant" else role, content=content))
878
+
879
+ # Make synchronous request to Poe
880
+ response_content = ""
881
+ for partial in fp.get_bot_response_sync(messages=poe_messages, bot_name=model, api_key=api_key):
882
+ if hasattr(partial, "text"):
883
+ response_content += partial.text
884
+
885
+ # Create a response object with a structure similar to other APIs
886
+ response = {
887
+ "id": f"poe-{int(time.time())}",
888
+ "choices": [
889
+ {
890
+ "message": {
891
+ "role": "assistant",
892
+ "content": response_content
893
+ }
894
+ }
895
+ ]
896
+ }
897
+
898
+ return response
899
+ except Exception as e:
900
+ logger.error(f"Poe API error: {str(e)}")
901
+ raise e
902
+
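
A minimal sketch of how the helper above might be invoked, assuming fastapi_poe is installed and a valid Poe API key is available. Only the `model` and `messages` fields of the payload are read by call_poe_api, so the OpenAI-style extras the rest of app.py builds are simply ignored:

```python
payload = {
    "model": "chinchilla",  # Poe bot name for GPT-3.5-Turbo (see POE_MODELS)
    "messages": [
        {"role": "user", "content": "Reply with a one-line greeting."}
    ],
}

try:
    result = call_poe_api(payload)  # falls back to POE_API_KEY from the environment
    # result = call_poe_api(payload, api_key_override="...")  # or pass a key explicitly
    print(result["choices"][0]["message"]["content"])
except Exception as exc:
    # call_poe_api logs and re-raises, so callers still need their own error handling
    print(f"Poe request failed: {exc}")
```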
903
  def call_openrouter_api(payload, api_key_override=None):
904
  """Make a call to OpenRouter API with error handling"""
905
  try:
 
1142
  return text_content
1143
  else:
1144
  return "No response content from Cohere"
1145
+
1146
+ elif provider == "Poe":
1147
+ if isinstance(result, dict) and "choices" in result and len(result["choices"]) > 0:
1148
+ return result["choices"][0]["message"]["content"]
1149
+ return "No response content from Poe"
1150
+
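
Since call_poe_api already normalizes its output into an OpenAI-style dict, this branch is a plain dictionary walk; a small sketch of the shape it expects:

```python
poe_result = {
    "id": "poe-1700000000",
    "choices": [{"message": {"role": "assistant", "content": "Hello from Poe"}}],
}
assert extract_ai_response(poe_result, "Poe") == "Hello from Poe"
assert extract_ai_response({}, "Poe") == "No response content from Poe"
```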
1151
  elif provider == "Together":
1152
  # Handle response from Together's native client
1153
  if hasattr(result, "choices") and len(result.choices) > 0:
 
1697
  {"role": "assistant", "content": error_message}
1698
  ]
1699
 
1700
+ elif provider == "Poe":
1701
+ # Get model ID from registry
1702
+ model_id, _ = get_model_info(provider, model_choice)
1703
+ if not model_id:
1704
+ error_message = f"Error: Model '{model_choice}' not found in Poe"
1705
+ return history + [
1706
+ {"role": "user", "content": message},
1707
+ {"role": "assistant", "content": error_message}
1708
+ ]
1709
+
1710
+ # Build Poe payload
1711
+ payload = {
1712
+ "model": model_id,
1713
+ "messages": messages
1714
+ # Poe doesn't support most parameters directly
1715
+ }
1716
+
1717
+ # Call Poe API
1718
+ logger.info(f"Sending request to Poe model: {model_id}")
1719
+
1720
+ try:
1721
+ response = call_poe_api(payload, api_key_override)
1722
+
1723
+ # Extract response
1724
+ ai_response = extract_ai_response(response, provider)
1725
+ return history + [
1726
+ {"role": "user", "content": message},
1727
+ {"role": "assistant", "content": ai_response}
1728
+ ]
1729
+ except Exception as e:
1730
+ error_message = f"Poe API Error: {str(e)}"
1731
+ logger.error(error_message)
1732
+ return history + [
1733
+ {"role": "user", "content": message},
1734
+ {"role": "assistant", "content": error_message}
1735
+ ]
1736
+
1737
  elif provider == "Anthropic":
1738
  # Get model ID from registry
1739
  model_id, _ = get_model_info(provider, model_choice)
 
2255
  with gr.Group(elem_classes="provider-selection"):
2256
  gr.Markdown("### Provider Selection")
2257
 
2258
+ # Provider selection
2259
  provider_choice = gr.Radio(
2260
+ choices=["OpenRouter", "OpenAI", "HuggingFace", "Groq", "Cohere", "Together", "Anthropic", "Poe", "GoogleAI"],
2261
  value="OpenRouter",
2262
  label="AI Provider"
2263
  )
 
2272
  type="password",
2273
  value=OPENROUTER_API_KEY if OPENROUTER_API_KEY else ""
2274
  )
2275
+
2276
+ poe_api_key = gr.Textbox(
2277
+ placeholder="Enter Poe API key",
2278
+ label="Poe API Key",
2279
+ type="password",
2280
+ value=os.environ.get("POE_API_KEY", "")
2281
+ )
2282
 
2283
  openai_api_key = gr.Textbox(
2284
  placeholder="Enter OpenAI API key",
 
2348
  elem_id="openrouter-model-choice",
2349
  visible=True
2350
  )
2351
+
2352
+ # Add Poe model dropdown
2353
+ poe_model = gr.Dropdown(
2354
+ choices=list(POE_MODELS.keys()),
2355
+ value="chinchilla" if "chinchilla" in POE_MODELS else None,
2356
+ label="Poe Model",
2357
+ elem_id="poe-model-choice",
2358
+ visible=False
2359
+ )
2360
 
2361
  openai_model = gr.Dropdown(
2362
  choices=list(OPENAI_MODELS.keys()),
 
2607
  cohere_model: gr.update(visible=(provider == "Cohere")),
2608
  together_model: gr.update(visible=(provider == "Together")),
2609
  anthropic_model: gr.update(visible=(provider == "Anthropic")),
2610
+ poe_model: gr.update(visible=(provider == "Poe")),
2611
  googleai_model: gr.update(visible=(provider == "GoogleAI"))
2612
  }
2613
 
 
2783
 
2784
 
2785
  def get_current_model(provider, openrouter_model, openai_model, hf_model, groq_model, cohere_model,
2786
+ together_model, anthropic_model, poe_model, googleai_model):
2787
  """Get the currently selected model based on provider"""
2788
  if provider == "OpenRouter":
2789
  return openrouter_model
 
2799
  return together_model
2800
  elif provider == "Anthropic":
2801
  return anthropic_model
2802
+ elif provider == "Poe":
2803
+ return poe_model
2804
  elif provider == "GoogleAI":
2805
  return googleai_model
2806
  return None
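
Because get_current_model selects purely by position, the new poe_model argument has to sit between anthropic_model and googleai_model everywhere it is passed. An illustrative call with the Poe provider active (the function is defined inside create_app, so this is a sketch rather than module-level code):

```python
choice = get_current_model(
    "Poe",
    None, None, None, None, None,   # openrouter, openai, hf, groq, cohere
    None, None,                     # together, anthropic
    "chinchilla",                   # poe_model — the value that gets returned
    None,                           # googleai
)
# choice == "chinchilla"
```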
2807
 
 
2808
  # Process uploaded images
2809
  image_upload_btn.upload(
2810
  fn=lambda files: files,
 
3036
 
3037
  # Set up submission event
3038
  def submit_message(message, history, provider,
3039
+ openrouter_model, openai_model, hf_model, groq_model, cohere_model, together_model, anthropic_model, poe_model, googleai_model,
3040
  temperature, max_tokens, top_p, frequency_penalty, presence_penalty, repetition_penalty,
3041
  top_k, min_p, seed, top_a, stream_output, response_format,
3042
  images, documents, reasoning_effort, system_message, transforms,
3043
+ openrouter_api_key, openai_api_key, hf_api_key, groq_api_key, cohere_api_key, together_api_key, anthropic_api_key, poe_api_key, googleai_api_key):
3044
 
3045
  """Submit message to selected provider and model"""
3046
  # Get the currently selected model
3047
  model_choice = get_current_model(provider, openrouter_model, openai_model, hf_model, groq_model, cohere_model,
3048
+ together_model, anthropic_model, poe_model, googleai_model)
3049
 
3050
  # Check if model is selected
3051
  if not model_choice:
 
3071
  api_key_override = together_api_key
3072
  elif provider == "Anthropic" and anthropic_api_key:
3073
  api_key_override = anthropic_api_key
3074
+ elif provider == "Poe" and poe_api_key:
3075
+ api_key_override = poe_api_key
3076
  elif provider == "GoogleAI" and googleai_api_key:
3077
  api_key_override = googleai_api_key
3078
 
 
3107
  fn=submit_message,
3108
  inputs=[
3109
  message, chatbot, provider_choice,
3110
+ openrouter_model, openai_model, hf_model, groq_model, cohere_model, together_model, anthropic_model, poe_model, googleai_model,
3111
  temperature, max_tokens, top_p, frequency_penalty, presence_penalty, repetition_penalty,
3112
  top_k, min_p, seed, top_a, stream_output, response_format,
3113
  images, documents, reasoning_effort, system_message, transforms,
3114
+ openrouter_api_key, openai_api_key, hf_api_key, groq_api_key, cohere_api_key, together_api_key, anthropic_api_key, poe_api_key, googleai_api_key
3115
  ],
3116
  outputs=chatbot,
3117
  show_progress="minimal",
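
The inputs list above is matched to submit_message positionally, so poe_model and poe_api_key must appear in exactly the same slots as in the function signature. A hedged sanity-check sketch (illustrative only, not part of app.py) that would catch an ordering slip:

```python
import inspect

# Compare the declared parameter order against the order used in the Gradio inputs list.
params = list(inspect.signature(submit_message).parameters)
assert params.index("poe_model") == params.index("anthropic_model") + 1
assert params.index("poe_api_key") == params.index("anthropic_api_key") + 1
```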
 
3126
  fn=submit_message,
3127
  inputs=[
3128
  message, chatbot, provider_choice,
3129
+ openrouter_model, openai_model, hf_model, groq_model, cohere_model, together_model, anthropic_model, poe_model, googleai_model,
3130
  temperature, max_tokens, top_p, frequency_penalty, presence_penalty, repetition_penalty,
3131
  top_k, min_p, seed, top_a, stream_output, response_format,
3132
  images, documents, reasoning_effort, system_message, transforms,
3133
+ openrouter_api_key, openai_api_key, hf_api_key, groq_api_key, cohere_api_key, together_api_key, anthropic_api_key, poe_api_key, googleai_api_key
3134
  ],
3135
  outputs=chatbot,
3136
  show_progress="minimal",
 
3163
  logger.warning("WARNING: OPENROUTER_API_KEY environment variable is not set")
3164
  missing_keys.append("OpenRouter")
3165
 
3166
+ # Add Poe
3167
+ if not POE_API_KEY:
3168
+ logger.warning("WARNING: POE_API_KEY environment variable is not set")
3169
+ missing_keys.append("Poe")
3170
+
3171
  if not ANTHROPIC_API_KEY:
3172
  logger.warning("WARNING: ANTHROPIC_API_KEY environment variable is not set")
3173
  missing_keys.append("Anthropic")
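
Since call_poe_api imports fastapi_poe lazily, a deployment that enables the Poe provider also needs the package installed (pip install fastapi_poe). A small, optional startup check in the same spirit as the key warnings above — illustrative, not part of this commit:

```python
try:
    import fastapi_poe  # noqa: F401  # required by call_poe_api at request time
except ImportError:
    logger.warning("WARNING: fastapi_poe is not installed; Poe requests will fail until it is added")
```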