Update app.py
app.py
CHANGED
@@ -55,7 +55,7 @@ def create_space_action(repo_name: str, sdk: str, profile: gr.OAuthProfile, toke
             space_sdk=sdk
         )
         url = f"https://huggingface.co/spaces/{repo_id}"
-        iframe = f'<iframe src="{url}" width="100%" height="500px"></iframe>'
+        iframe = f'<iframe src="{url}" width="100%" height="500px"></iframe>'
         return repo_id, iframe
     except Exception as e:
         raise RuntimeError(f"Failed to create Space `{repo_id}`: {e}")
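
For context, the iframe string built here is presumably rendered by an HTML component elsewhere in the app. A minimal, self-contained sketch of that pattern (the repo_id value is a placeholder, not taken from this repo):

import gradio as gr

repo_id = "your-username/myapp"  # placeholder Space id
url = f"https://huggingface.co/spaces/{repo_id}"
iframe = f'<iframe src="{url}" width="100%" height="500px"></iframe>'

with gr.Blocks() as demo:
    # gr.HTML renders the raw iframe markup, embedding the Space preview in the page.
    gr.HTML(iframe)

demo.launch()
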
@@ -140,20 +140,25 @@ def get_container_logs_action(repo_id, profile, token):
 def configure_gemini(api_key: str | None, model_name: str | None) -> str:
     """Configures the Gemini API and checks if the model is accessible."""
     if not api_key:
+        # Keep the message about key not set
         return "⚠️ Gemini API key is not set."
     if not model_name:
+        # Keep the message about model not selected
         return "⚠️ Please select a Gemini model."
     try:
         genai.configure(api_key=api_key)
         # Attempt a simple call to verify credentials and model availability
         # This will raise an exception if the key is invalid or model not found
         genai.GenerativeModel(model_name).generate_content("ping", stream=False)
+        # This message indicates the API call *for configuration check* was successful
         return f"✅ Gemini configured successfully with **{model_name}**."
     except Exception as e:
+        # This message indicates the API call *for configuration check* failed
         return f"❌ Error configuring Gemini: {e}"
 
 def call_gemini(prompt: str, api_key: str, model_name: str, use_grounding: bool = False) -> str:
     """Calls the Gemini API with a given prompt, optionally using grounding."""
+    # This check is crucial - it will raise an error *before* the API call if prereqs aren't met
     if not api_key or not model_name:
         raise ValueError("Gemini API key or model not set.")
     try:
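
The verification trick above (a throwaway "ping" generation) can be reproduced standalone. The sketch below uses the same google-generativeai calls shown in the hunk; the helper name is illustrative:

import google.generativeai as genai

def gemini_is_usable(api_key: str, model_name: str) -> bool:
    """Illustrative helper: True if the key/model pair can serve a trivial request."""
    try:
        genai.configure(api_key=api_key)
        # Any failure here (bad key, unknown model, network issue) raises an exception.
        genai.GenerativeModel(model_name).generate_content("ping", stream=False)
        return True
    except Exception:
        return False
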
@@ -217,47 +222,7 @@ def add_bot_message(history: list[dict], bot_message: str) -> list[dict]:
     history.append({"role": "assistant", "content": bot_message})
     return history
 
-# Helper function to control send button interactivity and prerequisite status text
-# This function is triggered by changes in login status and Gemini configuration
-# Defined OUTSIDE the gr.Blocks context
-def update_send_button_state(
-    profile: gr.OAuthProfile | None,
-    token: gr.OAuthToken | None,
-    key: str | None,
-    model: str | None,
-    # Absorb potential extra args Gradio passes to event handlers
-    *args,
-    **kwargs
-):
-    """Determines if the send button should be active and updates status text."""
-    # Add debug prints to see what values this function is getting
-    print(f"update_send_button_state - profile: {profile is not None}, token: {token is not None}, key: {key is not None}, model: {model is not None}")
-    is_logged_in = profile is not None and token is not None
-    is_gemini_ready = key is not None and model is not None # Check if key and model are set
-
-    status_parts = []
-    if not is_logged_in:
-        status_parts.append("⚠️ Not logged in to Hugging Face.")
-    if not key:
-        status_parts.append("⚠️ Gemini API key not set.")
-    if not model:
-        status_parts.append("⚠️ Gemini model not selected.")
-
-    is_ready = is_logged_in and is_gemini_ready
-
-    if is_ready:
-        status_str = "✅ Ready to send commands."
-    else:
-        status_str = " ".join(status_parts)
-        # Fallback, should not be needed if not is_ready, but good practice
-        if not status_str:
-            status_str = "Checking prerequisites..."
-
-    # gr.update is used to dynamically change a component's properties
-    return gr.update(interactive=is_ready), status_str
-
 # Add an initial welcome message to the chatbot (defined outside Blocks to be called by load chain)
-# Defined OUTSIDE the gr.Blocks context
 def greet():
     return [{"role": "assistant", "content": "Welcome! Please log in to Hugging Face and provide your Google AI Studio API key to start building Spaces. Once ready, type 'generate me a gradio app called myapp' or 'create' to begin."}]
 
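
greet() returns chat history in the role/content "messages" format. A minimal sketch of wiring such a function into a chatbot on page load, assuming a Gradio version whose gr.Chatbot accepts type="messages":

import gradio as gr

def greet():
    return [{"role": "assistant", "content": "Welcome!"}]

with gr.Blocks() as demo:
    chatbot = gr.Chatbot(type="messages")
    # Populate the chatbot once, when the page loads.
    demo.load(greet, inputs=None, outputs=[chatbot])

demo.launch()
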
@@ -346,7 +311,8 @@ def ai_workflow_chat(
         return # Exit the generator for this click
 
     if not (gemini_api_key and gemini_model):
-
+        # The button is *enabled* if HF is logged in, but we still need Gemini for the workflow
+        history = add_bot_message(history, "Workflow cannot start: Please enter your API key and select a Gemini model.")
         # Yield updated history and current state, then exit for this click
         yield (history, repo_id, state, updated_preview, updated_run, updated_build,
                attempts, app_desc, repo_name, generated_code, use_grounding)
@@ -559,8 +525,9 @@ Return **only** the python code block for `app.py`. Do not include any extra tex
     reqs_list = ["gradio"] if space_sdk == "gradio" else ["streamlit"]
     # Add essential libraries regardless of description keywords or grounding
     essential_libs = ["google-generativeai", "huggingface_hub"]
-
-
+    # Only add if Gemini is actually needed for the app (determined by description or if key is present)
+    # If we are here, key and model are available based on STATE_IDLE checks
+    reqs_list.extend(essential_libs)
 
     # Add common libraries if description suggests they might be needed
     if app_desc:
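
The list built here presumably ends up as a requirements.txt uploaded to the Space. A hedged sketch of that assembly step; the function name and keyword check are illustrative, not taken from the repo:

def build_requirements(space_sdk: str, app_desc: str | None) -> str:
    """Illustrative: turn the dependency list into requirements.txt content."""
    reqs_list = ["gradio"] if space_sdk == "gradio" else ["streamlit"]
    # Essential libraries, mirroring the hunk above.
    reqs_list.extend(["google-generativeai", "huggingface_hub"])
    # Keyword-driven extras are an assumption about how app_desc is used.
    if app_desc and "image" in app_desc.lower():
        reqs_list.append("pillow")
    # De-duplicate while preserving order; one package per line.
    return "\n".join(dict.fromkeys(reqs_list)) + "\n"

print(build_requirements("gradio", "an image captioning demo"))
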
@@ -939,9 +906,6 @@ with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
             status_text = gr.Textbox(label="Current State", value=STATE_IDLE, interactive=False)
             repo_id_text = gr.Textbox(label="Current Space ID", value="None", interactive=False)
 
-            # Define prereq_status before it's used in handlers that update it
-            prereq_status = gr.Markdown("Checking prerequisites...")
-
 
         # Main content area column
         with gr.Column(scale=3):
@@ -960,37 +924,46 @@ with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
 
     # --- Define Event Handlers and Chains AFTER all components are defined ---
 
-    #
+    # Helper function to update send button interactivity based on prereqs
+    def update_send_button_interactivity(profile, token, api_key, model_name):
+        is_logged_in = profile is not None and token is not None
+        is_gemini_ready = api_key is not None and model_name is not None
+        is_ready = is_logged_in and is_gemini_ready
+        print(f"update_send_button_interactivity - HF Ready: {is_logged_in}, Gemini Ready: {is_gemini_ready}, Button Ready: {is_ready}")
+        return gr.update(interactive=is_ready)
+
+
+    # Handle login button click: Update profile/token state -> Update send button interactivity
     login_btn.click(
-        # The LoginButton outputs a tuple (OAuthProfile, OAuthToken) on success
         lambda x: (x[0], x[1]),
         inputs=[login_btn],
-        outputs=[hf_profile, hf_token] # Update
-    ).then(
-
-        update_send_button_state,
+        outputs=[hf_profile, hf_token] # Update HF State variables
+    ).then(
+        update_send_button_interactivity,
         inputs=[hf_profile, hf_token, gemini_key, gemini_model],
-        outputs=[send_btn
+        outputs=[send_btn] # Update button interactivity
     )
 
-    # Handle Gemini Key Input change:
+    # Handle Gemini Key Input change: Update key state -> Configure Gemini status -> Update send button interactivity
     gemini_input.change(
         lambda k: k, inputs=[gemini_input], outputs=[gemini_key] # Update gemini_key state
     ).then(
         configure_gemini, inputs=[gemini_key, gemini_model], outputs=[gemini_status] # Update Gemini status
     ).then(
-
-
+        update_send_button_interactivity,
+        inputs=[hf_profile, hf_token, gemini_key, gemini_model],
+        outputs=[send_btn] # Update button interactivity
     )
 
-    # Handle Gemini Model Selector change:
+    # Handle Gemini Model Selector change: Update model state -> Configure Gemini status -> Update send button interactivity
     model_selector.change(
         lambda m: m, inputs=[model_selector], outputs=[gemini_model] # Update gemini_model state
     ).then(
         configure_gemini, inputs=[gemini_key, gemini_model], outputs=[gemini_status] # Update Gemini status
    ).then(
-
-
+        update_send_button_interactivity,
+        inputs=[hf_profile, hf_token, gemini_key, gemini_model],
+        outputs=[send_btn] # Update button interactivity
     )
 
     # Handle Grounding checkbox change: update grounding state
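
The new helper relies on gr.update(interactive=...) plus .then() chaining. A stripped-down, runnable sketch of the same pattern; the component names here are invented for the demo:

import gradio as gr

def toggle_send(key, model):
    # Only the properties passed to gr.update are patched on the target component.
    return gr.update(interactive=bool(key and model))

with gr.Blocks() as demo:
    key_box = gr.Textbox(label="API key")
    model_box = gr.Dropdown(["model-a", "model-b"], label="Model")
    send_btn = gr.Button("Send", interactive=False)
    status = gr.Markdown("Waiting for prerequisites...")
    # Each event returns a dependency object, so follow-up steps chain with .then().
    key_box.change(toggle_send, inputs=[key_box, model_box], outputs=[send_btn]).then(
        lambda k, m: "Ready." if k and m else "Waiting for prerequisites...",
        inputs=[key_box, model_box], outputs=[status],
    )
    model_box.change(toggle_send, inputs=[key_box, model_box], outputs=[send_btn])

demo.launch()
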
@@ -1012,9 +985,10 @@ with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
 
     # The main event handler for the Send button
     # This .click() event triggers the ai_workflow_chat generator function
+    # Inputs are read from UI components AND State variables
+    # Outputs are updated by the values yielded from the generator
     send_btn.click(
         ai_workflow_chat, # The generator function to run
-        # Inputs are read from UI components AND State variables
         inputs=[
             user_input, chatbot, # UI component inputs (message, current chat history)
             hf_profile, hf_token, # HF State variables
@@ -1026,8 +1000,6 @@ with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
             debug_attempts, app_description, repo_name_state, generated_code_state, # Other State variables
             use_grounding_state # Add the new grounding state input
         ],
-        # Outputs update UI components AND State variables.
-        # The order MUST match the tuple yielded by the generator function.
         outputs=[
             chatbot, # Update Chatbot with new messages
             repo_id, workflow, # Update workflow State variables
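
The removed comment stated the key constraint for generator handlers: each yielded tuple maps positionally onto the outputs list. A minimal sketch of that contract, with names invented for the demo:

import gradio as gr

def workflow(message, history):
    # Every yield must provide one value per output component, in the same order.
    history = history + [{"role": "user", "content": message}]
    yield history, "STATE_WORKING"
    history = history + [{"role": "assistant", "content": f"Echo: {message}"}]
    yield history, "STATE_IDLE"

with gr.Blocks() as demo:
    chat = gr.Chatbot(type="messages")
    state_box = gr.Textbox(label="Current State", interactive=False)
    msg = gr.Textbox(label="Message")
    send = gr.Button("Send")
    send.click(workflow, inputs=[msg, chat], outputs=[chat, state_box])

demo.launch()
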
@@ -1043,7 +1015,7 @@ with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
     )
 
 
-    # --- Initial Load Event Chain (Defined INSIDE gr.Blocks, AFTER components) ---
+    # --- Initial Load Event Chain (Defined INSIDE gr.Blocks, AFTER components and initial bindings) ---
     # This chain runs once when the app loads
     ai_builder_tab.load(
         # Action 1: Show profile (loads cached login if available), does NOT need inputs
@@ -1057,11 +1029,11 @@ with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
         inputs=[gemini_key, gemini_model],
         outputs=[gemini_status] # Update Gemini status display
     ).then(
-        # Action 3: Update the send button
+        # Action 3: Update the send button interactivity based on initial states
         # This uses the *initial* values of hf_profile, hf_token, gemini_key, and gemini_model states
-
+        update_send_button_interactivity, # Use the dedicated helper
         inputs=[hf_profile, hf_token, gemini_key, gemini_model],
-        outputs=[send_btn
+        outputs=[send_btn] # Update button interactivity
     ).then(
         # Action 4: Add the initial welcome message to the chatbot
         greet,
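
The load chain runs the same interactivity helper once at startup so a cached login or pre-filled key is reflected immediately. A compact sketch of such a startup chain; the state defaults are illustrative:

import gradio as gr

def update_send_button_interactivity(profile, token, api_key, model_name):
    is_ready = all(v is not None for v in (profile, token, api_key, model_name))
    return gr.update(interactive=is_ready)

def greet():
    return [{"role": "assistant", "content": "Welcome!"}]

with gr.Blocks() as demo:
    hf_profile = gr.State(None)
    hf_token = gr.State(None)
    gemini_key = gr.State(None)
    gemini_model = gr.State("gemini-1.5-flash")  # illustrative default
    send_btn = gr.Button("Send", interactive=False)
    chatbot = gr.Chatbot(type="messages")
    # Chain of startup actions, mirroring Actions 3 and 4 from the diff.
    demo.load(update_send_button_interactivity,
              inputs=[hf_profile, hf_token, gemini_key, gemini_model],
              outputs=[send_btn]).then(greet, outputs=[chatbot])

demo.launch()
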