wuhp committed on
Commit 8756ea3 · verified · 1 Parent(s): e5dc606

Update app.py

Files changed (1)
  1. app.py +60 -16
app.py CHANGED
@@ -14,6 +14,24 @@ from huggingface_hub.utils import build_hf_headers, get_session, hf_raise_for_st
 
 # Removed the debugging print that attempts to read GOOGLE_API_KEY from environment
 
+# --- Define Gemini Model Information ---
+# Dictionary mapping internal model name to (Display Name, Description)
+GEMINI_MODELS = {
+    "gemini-1.5-flash": ("Gemini 1.5 Flash", "Fast and versatile performance across a diverse variety of tasks."),
+    "gemini-1.5-pro": ("Gemini 1.5 Pro", "Complex reasoning tasks requiring more intelligence."),
+    "gemini-1.5-flash-8b": ("Gemini 1.5 Flash 8B", "High volume and lower intelligence tasks."),
+    "gemini-2.0-flash": ("Gemini 2.0 Flash", "Next generation features, speed, thinking, realtime streaming, and multimodal generation."),
+    "gemini-2.0-flash-lite": ("Gemini 2.0 Flash-Lite", "Cost efficiency and low latency."),
+    # Note: Preview models might have shorter lifespans or different capabilities
+    # "gemini-2.5-flash-preview-04-17": ("Gemini 2.5 Flash Preview (04-17)", "Adaptive thinking, cost efficiency."),
+    # "gemini-2.5-pro-preview-03-25": ("Gemini 2.5 Pro Preview (03-25)", "Enhanced thinking and reasoning, multimodal understanding, advanced coding, and more."),
+}
+
+# Create the list of choices for the Gradio Radio component
+# Format is (Display Name, Internal Name)
+GEMINI_MODEL_CHOICES = [(display_name, internal_name) for internal_name, (display_name, description) in GEMINI_MODELS.items()]
+# Define the default model to be selected
+DEFAULT_GEMINI_MODEL = "gemini-1.5-flash" # Ensure this key exists in GEMINI_MODELS
 
 # --- Helper functions for Hugging Face integration ---
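Note (not part of the commit): the new GEMINI_MODELS table keys each entry by the internal model name and stores a (display name, description) pair. A minimal sketch of how the resulting (display, value) choices behave in a Gradio 4.x Radio — the component names and the two-entry mapping below are illustrative stand-ins:

```python
import gradio as gr

# Illustrative stand-in for the GEMINI_MODELS mapping introduced above.
MODELS = {
    "gemini-1.5-flash": ("Gemini 1.5 Flash", "Fast and versatile performance."),
    "gemini-1.5-pro": ("Gemini 1.5 Pro", "Complex reasoning tasks."),
}
# (display_name, internal_name) tuples: the UI shows the first element,
# while callbacks and the component value use the second.
CHOICES = [(display, name) for name, (display, _desc) in MODELS.items()]

with gr.Blocks() as demo:
    radio = gr.Radio(choices=CHOICES, value="gemini-1.5-flash", label="Select model")
    picked = gr.Textbox(label="Internal model name")
    # The callback receives "gemini-1.5-pro", not the label "Gemini 1.5 Pro".
    radio.change(lambda m: m, inputs=radio, outputs=picked)

if __name__ == "__main__":
    demo.launch()
```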
 
@@ -66,7 +84,7 @@ def upload_file_to_space_action(
     profile: gr.OAuthProfile,
     token: gr.OAuthToken
 ) -> None:
-    """Uploads a file to a Hugging Face Space repository."""
+    """Uploads a file to a Huging Face Space repository."""
     if not (profile and token and repo_id):
         raise ValueError("Hugging Face profile, token, or repo_id is missing.")
     try:
@@ -149,11 +167,19 @@ def configure_gemini(api_key: str | None, model_name: str | None) -> str:
         # This will raise an exception if the key is invalid or model not found
         genai.GenerativeModel(model_name).generate_content("ping", stream=False)
         # This message indicates the API call *for configuration check* was successful
-        return f"✅ Gemini configured successfully with **{model_name}**."
+        return f"✅ Gemini configured successfully with **{GEMINI_MODELS.get(model_name, ('Unknown Model', ''))[0]}**."
     except Exception as e:
         # This message indicates the API call *for configuration check* failed
         return f"❌ Error configuring Gemini: {e}"
 
+def get_model_description(model_name: str | None) -> str:
+    """Retrieves the description for a given model name."""
+    if model_name is None:
+        return "Select a model to see its description."
+    # Use .get with a default value to handle cases where the key might not be found
+    return GEMINI_MODELS.get(model_name, (model_name, "No description available."))[1]
+
+
 def call_gemini(prompt: str, api_key: str, model_name: str, use_grounding: bool = False) -> str:
     """Calls the Gemini API with a given prompt, optionally using grounding."""
     # This check is crucial - it will raise an error *before* the API call if prereqs aren't met
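A quick standalone illustration (again, not part of the commit) of the `.get()` fallbacks used in the success message and in `get_model_description`; the single-entry mapping is a stand-in for the full GEMINI_MODELS table:

```python
GEMINI_MODELS = {
    "gemini-1.5-flash": ("Gemini 1.5 Flash", "Fast and versatile performance across a diverse variety of tasks."),
}

def get_model_description(model_name: str | None) -> str:
    """Retrieves the description for a given model name."""
    if model_name is None:
        return "Select a model to see its description."
    # Unknown keys fall back to (model_name, "No description available.")
    return GEMINI_MODELS.get(model_name, (model_name, "No description available."))[1]

print(get_model_description("gemini-1.5-flash"))  # description from the table
print(get_model_description("not-a-model"))       # "No description available."
print(get_model_description(None))                # "Select a model to see its description."
```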
@@ -265,6 +291,11 @@ def wrapper_configure_gemini(prev_output, api_key: str | None, model_name: str |
 def wrapper_greet(prev_output):
     return greet()
 
+# Wrapper function for get_model_description when called in a .then() chain
+# It accepts the output of the previous function as the first arg and discards it.
+def wrapper_get_model_description(prev_output, model_name: str | None) -> str:
+    return get_model_description(model_name)
+
 
 # This is the main generator function for the workflow, triggered by the 'Send' button
 # NOTE: This function MUST accept ALL state variables as inputs that it might need to modify or pass through.
@@ -895,8 +926,8 @@ with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
     hf_token = gr.State(None)
     # Initialize gemini_api_key_state to empty string
     gemini_api_key_state = gr.State("") # start with no key
-    # Initialize gemini_model_state
-    gemini_model_state = gr.State("gemini-1.5-flash") # Default selected model
+    # Initialize gemini_model_state with the default model key
+    gemini_model_state = gr.State(DEFAULT_GEMINI_MODEL) # Default selected model
 
     repo_id = gr.State(None) # Stores the ID of the created Space
     workflow = gr.State(STATE_IDLE) # Stores the current state of the AI workflow
@@ -932,16 +963,18 @@ with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
 
     # Define model_selector before it's used in its change handler
     model_selector = gr.Radio(
-        choices=[
-            ("Gemini 1.5 Flash", "gemini-1.5-flash"),
-            ("Gemini 1.5 Pro", "gemini-1.5-pro"),
-            ("Gemini 1.0 Pro", "gemini-1.0-pro"),
-        ],
-        value="gemini-1.5-flash", # Default selection
+        # Use the list of choices generated from the GEMINI_MODELS dictionary
+        choices=GEMINI_MODEL_CHOICES,
+        value=DEFAULT_GEMINI_MODEL, # Default selection using the key
         label="Select model",
         interactive=True
     )
 
+    # Add a markdown field to display the model description
+    # Initialize with the description of the default model
+    model_description_text = gr.Markdown(get_model_description(DEFAULT_GEMINI_MODEL))
+
+
     # Define grounding checkbox before its change handler
     use_grounding_checkbox = gr.Checkbox(
         label="Enable Grounding with Google Search",
@@ -1012,7 +1045,7 @@ with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
         inputs=[gemini_input],
         outputs=[gemini_api_key_state] # This output becomes the implicit first arg for the .then() chain
     ).then(
-        # Wrap configure_gemini because it's in a .then() chain and receives the output of the prior step
+        # Configure Gemini using the updated state variables
        wrapper_configure_gemini, # Use the wrapper
         inputs=[gemini_api_key_state, gemini_model_state], # Explicitly pass the required states
         outputs=[gemini_status] # Update Gemini status display
@@ -1024,13 +1057,18 @@ with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
         outputs=send_button_update_output
     )
 
-    # Handle Gemini Model Selector change: Update model state -> Configure Gemini status -> Update send button state
+    # Handle Gemini Model Selector change: Update model state -> Update description -> Configure Gemini status -> Update send button state
     model_selector.change(
         lambda m: m, # Updates gemini_model_state
         inputs=[model_selector],
-        outputs=[gemini_model_state] # This output becomes the implicit first arg for the .then() chain
+        outputs=[gemini_model_state] # This output becomes the implicit first arg for the next .then() in this chain
+    ).then(
+        # Update the model description display
+        wrapper_get_model_description, # Use the wrapper
+        inputs=[gemini_model_state], # Get the new state value
+        outputs=[model_description_text] # Update description UI
     ).then(
-        # Wrap configure_gemini because it's in a .then() chain
+        # Configure Gemini using the updated state variables
         wrapper_configure_gemini, # Use the wrapper
         inputs=[gemini_api_key_state, gemini_model_state], # Explicitly pass the required states
         outputs=[gemini_status] # Update Gemini status display
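For orientation, a minimal self-contained sketch of the selector → state → description → status chain this hunk wires up, assuming Gradio 4.x. `describe` and `fake_configure` are simplified stand-ins for `get_model_description` and `configure_gemini`; as in the hunk above, every `.then()` step declares its own explicit inputs and outputs:

```python
import gradio as gr

def describe(model_name: str) -> str:
    return f"You selected **{model_name}**."

def fake_configure(model_name: str) -> str:
    return f"✅ Configured {model_name}."

with gr.Blocks() as demo:
    model_state = gr.State("gemini-1.5-flash")
    selector = gr.Radio(
        choices=[("Gemini 1.5 Flash", "gemini-1.5-flash"),
                 ("Gemini 1.5 Pro", "gemini-1.5-pro")],
        value="gemini-1.5-flash",
        label="Select model",
    )
    description = gr.Markdown(describe("gemini-1.5-flash"))
    status = gr.Markdown()

    # Step 1 copies the selection into state; steps 2 and 3 run in order afterwards,
    # each reading the state explicitly.
    selector.change(
        lambda m: m, inputs=selector, outputs=model_state
    ).then(
        describe, inputs=model_state, outputs=description
    ).then(
        fake_configure, inputs=model_state, outputs=status
    )

if __name__ == "__main__":
    demo.launch()
```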
@@ -1107,10 +1145,16 @@ with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
         # Action 3: After initial load checks, update the button state based on initial states
         # check_send_button_ready signature now handles the implicit previous output + explicit inputs
         check_send_button_ready,
-        inputs=send_button_interactive_binding_inputs, # Pass all 4 prerequisite states
+        inputs=send_button_interactive_binding_inputs,
         outputs=send_button_update_output, # Update the send button. This output becomes implicit first arg for next .then()
     ).then(
-        # Action 4: Add the initial welcome message to the chat history
+        # Action 4: Update the model description text based on the default selected model
+        # Use the wrapper because it's in a .then() chain
+        wrapper_get_model_description,
+        inputs=[gemini_model_state], # Get the default model name from state
+        outputs=[model_description_text] # Update description UI
+    ).then(
+        # Action 5: Add the initial welcome message to the chat history
         # Wrap greet because it's in a .then() chain and receives the output of the prior step (check_send_button_ready)
         wrapper_greet, # Use the wrapper
         inputs=None, # Greet takes no explicit inputs
 