Update app.py
app.py
CHANGED
@@ -12,6 +12,10 @@ from google.generativeai import types # Import types for configuration and tools
 from huggingface_hub import create_repo, list_models, upload_file, constants
 from huggingface_hub.utils import build_hf_headers, get_session, hf_raise_for_status
 
+# Add debugging print to check environment variable immediately
+print(f"Attempting to read GOOGLE_API_KEY from environment: {os.environ.get('GOOGLE_API_KEY')}")
+
+
 # --- Helper functions for Hugging Face integration ---
 
 def show_profile(profile: gr.OAuthProfile | None) -> str:
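Side note on the new debug print: it writes the raw key value into the Space's container logs. If the goal is only to confirm that the variable is visible to the process, a presence check avoids leaking the secret. A minimal sketch in plain Python (not taken from the app):

    import os

    # Log only whether GOOGLE_API_KEY is set, never its value, so the
    # secret does not end up in publicly visible Space logs.
    print(f"GOOGLE_API_KEY present in environment: {os.environ.get('GOOGLE_API_KEY') is not None}")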
@@ -304,7 +308,7 @@ def ai_workflow_chat(
     history = add_bot_message(history, "Workflow reset.")
     # Yield updated history and reset state variables to their initial values
     yield (history, None, STATE_IDLE, "<p>No Space created yet.</p>", "", "", 0,
-           None, None, None,
+           None, None, None, False) # Reset use_grounding to default False
     # No return needed after yield in this generator pattern; execution for this click ends here.
 
 elif generate_match:
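The reset branch above yields one tuple with a value for every output wired to the click handler, in the same order, so every state variable is overwritten in a single step. A stripped-down sketch of the pattern (names and tuple length are placeholders, not the app's real signature):

    STATE_IDLE = "idle"

    def ai_workflow_reset(history):
        # Generator-style handler: one value per wired output, in order;
        # False resets the hypothetical use_grounding flag to its default.
        history = history + [("assistant", "Workflow reset.")]
        yield (history, None, STATE_IDLE, "<p>No Space created yet.</p>", "", "", 0,
               None, None, None, False)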
@@ -417,6 +421,8 @@ Return **only** the python code block for `app.py`. Do not include any extra tex
 """
 try:
     history = add_bot_message(history, f"🧠 Generating `{prompt_desc}` `{space_sdk}` app (`app.py`) code with Gemini...")
+    if use_grounding:
+        history = add_bot_message(history, "(Using Grounding with Google Search)")
     # Yield to show message before the potentially time-consuming API call
     yield (history, repo_id, state, updated_preview, updated_run, updated_build,
            attempts, app_desc, repo_name, generated_code, use_grounding) # Include use_grounding
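This hunk only adds the chat notice when use_grounding is set; how the flag reaches the Gemini call is outside the hunk. One plausible shape for that call with the google-generativeai SDK is sketched below; the 'google_search_retrieval' tool spec is an assumption about the SDK version in use, not something shown in this diff:

    import google.generativeai as genai

    def generate_app_code(prompt: str, model_name: str, use_grounding: bool) -> str:
        # Assumed tool spec: some google-generativeai releases accept the string
        # 'google_search_retrieval' to enable Grounding with Google Search.
        tools = "google_search_retrieval" if use_grounding else None
        model = genai.GenerativeModel(model_name, tools=tools)
        return model.generate_content(prompt).text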
@@ -513,7 +519,7 @@ Return **only** the python code block for `app.py`. Do not include any extra tex
     # Yield updated state variables and history
     yield (history, repo_id, state, updated_preview, updated_run, updated_build,
            attempts, app_desc, repo_name, generated_code, use_grounding) # Include use_grounding
-
+    # No return needed
 
 
 elif state == STATE_UPLOADING_REQUIREMENTS:
@@ -687,6 +693,8 @@ This Space was automatically generated by an AI workflow using Google Gemini and
 
 elif state == STATE_DEBUGGING_CODE:
     history = add_bot_message(history, f"🧠 Calling Gemini to generate fix based on logs...")
+    if use_grounding:
+        history = add_bot_message(history, "(Using Grounding with Google Search)")
     # Yield message before Gemini API call
     yield (history, repo_id, state, updated_preview, updated_run, updated_build,
            attempts, app_desc, repo_name, generated_code, use_grounding) # Include use_grounding
@@ -696,6 +704,8 @@ This Space was automatically generated by an AI workflow using Google Gemini and
 You are debugging a {space_sdk} Space. The goal is to fix the code in `app.py` based on the container logs provided.
 
 Here are the container logs:
+Use code with caution.
+Python
 {updated_run}
 Generate the *complete, fixed* content for `app.py` based on these logs.
 Return **only** the python code block for app.py. Do not include any extra text, explanations, or markdown outside the code block.
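Both this debugging prompt and the generation prompt ask Gemini for only a fenced python code block, so the workflow presumably strips the fence before writing app.py. A minimal sketch of such extraction (the helper name is illustrative, not from the app):

    import re

    def extract_python_block(reply: str) -> str:
        # Take the body of the first ```python fenced block; fall back to the
        # raw reply if the model ignored the formatting instruction.
        match = re.search(r"```(?:python)?\s*\n(.*?)```", reply, re.DOTALL)
        return match.group(1).strip() if match else reply.strip()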
@@ -790,7 +800,7 @@ with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
     # Gradio State variables - these persist their values across user interactions (clicks)
     hf_profile = gr.State(None)
     hf_token = gr.State(None)
-    # FIX: Initialize gemini_key state from env var on load
+    # FIX: Initialize gemini_key state from env var on load for robustness
     gemini_key = gr.State(os.environ.get("GOOGLE_API_KEY"))
     gemini_model = gr.State("gemini-1.5-flash") # Default selected model
     repo_id = gr.State(None) # Stores the ID of the created Space
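gr.State(os.environ.get(...)) captures the environment variable once, when the Blocks layout is built at import time; the load-time chain added further down re-reads it on every page load. A tiny sketch of the distinction with generic names (not the app's components):

    import os
    import gradio as gr

    with gr.Blocks() as demo:
        # Evaluated once, when this module runs and the layout is defined.
        api_key = gr.State(os.environ.get("GOOGLE_API_KEY"))

        # Re-evaluated on every page load, so a variable exported after the
        # module was imported is still picked up.
        demo.load(lambda: os.environ.get("GOOGLE_API_KEY"), outputs=[api_key])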
@@ -811,11 +821,36 @@ with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
     # Hugging Face Login Button
     login_btn = gr.LoginButton(variant="huggingface")
 
-    # Initial load event to check login status (if cached)
-    #
-
+    # Initial load event to check login status (if cached) and explicitly set gemini_key state
+    # Chain events: Show profile -> Set Gemini Key state -> Configure Gemini -> Update Send Button
+    def initial_setup_on_load(profile, token):
+        # This function explicitly sets the gemini_key state from the env var
+        # This runs *after* the profile is potentially loaded, ensuring gemini_key is set
+        # before configure_gemini and update_send_button_state read it from state.
+        # It needs to return the values for the next step in the chain.
+        return profile, token, os.environ.get("GOOGLE_API_KEY")
+
+    ai_builder_tab.load(
+        initial_setup_on_load,
+        inputs=[hf_profile, hf_token], # Pass current state values
+        outputs=[hf_profile, hf_token, gemini_key] # Update state values
+    ).then( # Chain after initial state setup
+        configure_gemini,
+        inputs=[gemini_key, gemini_model],
+        outputs=[gemini_status] # Update Gemini status text
+    ).then( # Chain after config status
+        update_send_button_state,
+        inputs=[hf_profile, hf_token, gemini_key, gemini_model],
+        outputs=[send_btn, prereq_status] # Update button interactivity and status text
+    )
+
+
     # Update status display when login button reports success
-    login_btn.click(show_profile, outputs=login_status)
+    login_btn.click(show_profile, outputs=login_status).then(
+        update_send_button_state, # Recalculate status and button state
+        inputs=[hf_profile, hf_token, gemini_key, gemini_model],
+        outputs=[send_btn, prereq_status]
+    )
 
     gr.Markdown("## Google AI Studio / Gemini")
     # Textbox for Gemini API key. Read from environment variable if available.
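The new wiring leans on Gradio event chaining: each .then() step starts only after the previous one finishes, so state written in step one is visible to the later steps. A self-contained sketch of the same load -> configure -> gate pattern with simplified placeholder functions:

    import gradio as gr

    def load_key():
        # Step 1: seed the state (stands in for initial_setup_on_load).
        return "dummy-key"

    def configure(key):
        # Step 2: runs only after the state above has been written.
        return f"Configured: {key is not None}"

    def gate_button(key):
        # Step 3: enable the button once the prerequisite is met.
        return gr.update(interactive=key is not None)

    with gr.Blocks() as demo:
        key_state = gr.State(None)
        status = gr.Markdown("Not configured")
        send = gr.Button("Send", interactive=False)

        demo.load(load_key, outputs=[key_state]).then(
            configure, inputs=[key_state], outputs=[status]
        ).then(
            gate_button, inputs=[key_state], outputs=[send]
        )

    if __name__ == "__main__":
        demo.launch()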
@@ -840,14 +875,25 @@ with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
         interactive=True
     )
 
-    #
-
-
-
-        inputs=[gemini_key, gemini_model],
-
+    # Gemini Key Input change: update key state -> configure Gemini -> update send button
+    gemini_input.change(
+        lambda k: k, inputs=[gemini_input], outputs=[gemini_key] # Update gemini_key state
+    ).then(
+        configure_gemini, inputs=[gemini_key, gemini_model], outputs=[gemini_status] # Update Gemini status
+    ).then(
+        update_send_button_state, inputs=[hf_profile, hf_token, gemini_key, gemini_model], outputs=[send_btn, prereq_status] # Update button/prereq status
+    )
+
+    # Gemini Model Selector change: update model state -> configure Gemini -> update send button
+    model_selector.change(
+        lambda m: m, inputs=[model_selector], outputs=[gemini_model] # Update gemini_model state
+    ).then(
+        configure_gemini, inputs=[gemini_key, gemini_model], outputs=[gemini_status] # Update Gemini status
+    ).then(
+        update_send_button_state, inputs=[hf_profile, hf_token, gemini_key, gemini_model], outputs=[send_btn, prereq_status] # Update button/prereq status
     )
 
+
     # New checkbox for optional grounding
     use_grounding_checkbox = gr.Checkbox(
         label="Enable Grounding with Google Search",
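Both chains open with an identity lambda whose only job is to mirror the component's new value into a gr.State that later steps and other handlers read. Stripped down to that first link (the choices shown are illustrative):

    import gradio as gr

    with gr.Blocks() as demo:
        model_state = gr.State("gemini-1.5-flash")
        selector = gr.Radio(["gemini-1.5-flash", "gemini-1.5-pro"],
                            value="gemini-1.5-flash", label="Model")

        # .change fires with the newly selected value; the lambda copies it
        # into the State so later .then() steps and other events can use it.
        selector.change(lambda m: m, inputs=[selector], outputs=[model_state])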
@@ -898,6 +944,8 @@ with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
         **kwargs
     ):
         """Determines if the send button should be active and updates status text."""
+        # Add debug prints to see what values this function is getting
+        print(f"update_send_button_state - profile: {profile is not None}, token: {token is not None}, key: {key is not None}, model: {model is not None}")
         is_logged_in = profile is not None and token is not None
         is_gemini_ready = key is not None and model is not None # Check if key and model are set
 
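For context, update_send_button_state (whose body is mostly outside this hunk) gates the send button with gr.update and reports which prerequisites are missing. A condensed sketch of that kind of gate, not the app's exact implementation:

    import gradio as gr

    def update_send_button_state(profile, token, key, model):
        # Collect the names of any missing prerequisites for the status text.
        missing = [name for name, value in [("HF login", profile), ("HF token", token),
                                            ("Gemini key", key), ("Gemini model", model)]
                   if value is None]
        ready = not missing
        status = "Ready" if ready else "Missing: " + ", ".join(missing)
        # Only the button's interactivity changes; the component itself stays.
        return gr.update(interactive=ready), status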
@@ -922,70 +970,6 @@ with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
         # gr.update is used to dynamically change a component's properties
         return gr.update(interactive=is_ready), status_str
 
-    # --- Implement Chained Events for Prerequisites ---
-    # Gradio's `.then()` allows chaining events: Action A happens, then Action B happens.
-
-    # 1. Login Button: When clicked and successful, update profile/token state,
-    # THEN update send button state based on all prereqs.
-    login_btn.click(
-        # The LoginButton outputs a tuple (OAuthProfile, OAuthToken) on success
-        lambda x: (x[0], x[1]),
-        inputs=[login_btn],
-        outputs=[hf_profile, hf_token] # Update these State variables
-    ).then( # Chain the next action after state is updated
-        update_send_button_state,
-        inputs=[hf_profile, hf_token, gemini_key, gemini_model],
-        outputs=[send_btn, prereq_status] # Update button interactivity and status text
-    )
-
-    # 2. Gemini Key Input: When text changes, update key state,
-    # THEN configure Gemini status, THEN update send button state.
-    # The Textbox 'change' event passes the new value as its input
-    gemini_input.change(
-        lambda k: k, # Simple function to pass the new value to the state variable
-        inputs=[gemini_input],
-        outputs=[gemini_key] # Update gemini_key state variable
-    ).then( # Chain configure_gemini after key state is updated
-        configure_gemini,
-        inputs=[gemini_key, gemini_model],
-        outputs=[gemini_status] # Update Gemini status text
-    ).then( # Chain update_send_button_state after config status is updated
-        update_send_button_state,
-        inputs=[hf_profile, hf_token, gemini_key, gemini_model],
-        outputs=[send_btn, prereq_status] # Update button interactivity and status text
-    )
-
-    # 3. Gemini Model Selector: When selection changes, update model state,
-    # THEN configure Gemini status, THEN update send button state.
-    # The Radio 'change' event passes the new value as its input
-    model_selector.change(
-        lambda m: m, # Simple function to pass the new value to the state variable
-        inputs=[model_selector],
-        outputs=[gemini_model] # Update gemini_model state variable
-    ).then( # Chain configure_gemini after model state is updated
-        configure_gemini,
-        inputs=[gemini_key, gemini_model],
-        outputs=[gemini_status] # Update Gemini status text
-    ).then( # Chain update_send_button_state after config status is updated
-        update_send_button_state,
-        inputs=[hf_profile, hf_token, gemini_key, gemini_model],
-        outputs=[send_btn, prereq_status] # Update button interactivity and status text
-    )
-
-    # 4. Initial Load: On page load, check prereqs and update send button/status.
-    # This accounts for cached logins or environment variables set before launch.
-    ai_builder_tab.load(
-        update_send_button_state,
-        inputs=[hf_profile, hf_token, gemini_key, gemini_model],
-        outputs=[send_btn, prereq_status] # Update button interactivity and status text
-    )
-
-    # UI elements to display the Space preview iframe and build/run logs
-    iframe = gr.HTML("<p>No Space created yet.</p>") # HTML element for the Space iframe
-    # Textboxes for logs, interactive=False means user can't type here
-    build_txt = gr.Textbox(label="Build Logs", lines=10, interactive=False, value="", max_lines=20) # Set max_lines for scrollability
-    run_txt = gr.Textbox(label="Container Logs", lines=10, interactive=False, value="", max_lines=20) # Set max_lines for scrollability
-
     # The main event handler for the Send button
     # This .click() event triggers the ai_workflow_chat generator function
     send_btn.click(
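send_btn.click drives ai_workflow_chat, which is a generator: every yield streams an intermediate UI update to the browser before the next slow step (Gemini call, Space build) runs. A minimal sketch of a generator-backed click handler with placeholder names:

    import time
    import gradio as gr

    def long_workflow(history):
        # Each yield pushes an intermediate chatbot state to the page,
        # so progress is visible before the slow step finishes.
        history = history + [["request", "Starting..."]]
        yield history
        time.sleep(2)  # stand-in for a slow API or build step
        history = history + [[None, "Done."]]
        yield history

    with gr.Blocks() as demo:
        chat = gr.Chatbot()
        btn = gr.Button("Send")
        btn.click(long_workflow, inputs=[chat], outputs=[chat])

    if __name__ == "__main__":
        demo.launch()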
@@ -1047,4 +1031,4 @@ if __name__ == "__main__":
     os.makedirs(os.environ["GRADIO_TEMP_DIR"], exist_ok=True) # Ensure the directory exists
 
     # Launch the Gradio UI
-
+    # The Gradio launch call blo
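The new comment is cut off in the diff ("... launch call blo"); presumably it notes that launch() blocks the calling thread. A generic sketch of the launch step under that assumption (the temp-dir handling and flags here are illustrative, not the app's actual options):

    import os
    import gradio as gr

    with gr.Blocks() as demo:
        gr.Markdown("placeholder UI")

    if __name__ == "__main__":
        os.environ.setdefault("GRADIO_TEMP_DIR", "./gradio_tmp")
        os.makedirs(os.environ["GRADIO_TEMP_DIR"], exist_ok=True)
        # launch() blocks the main thread until the server stops; pass
        # prevent_thread_lock=True if code after this call must keep running.
        demo.launch()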