# app.py — AI-powered Hugging Face Space builder (Gradio UI + Gemini).
# NOTE: Hugging Face file-viewer residue ("raw / history / blame" header)
# that had been pasted above the code was removed; the module starts here.
import os
import re
import time
import json
import io
import requests
import gradio as gr
import google.generativeai as genai
from huggingface_hub import create_repo, list_models, upload_file, constants
from huggingface_hub.utils import build_hf_headers, get_session, hf_raise_for_status
# --- Helper functions for Hugging Face integration ---
def show_profile(profile: gr.OAuthProfile | None) -> str:
    """Render a markdown snippet describing the current HF login status."""
    # A missing OAuth profile means the user has not authenticated yet.
    return (
        "*Not logged in.*"
        if profile is None
        else f"✅ Logged in as **{profile.username}**"
    )
def list_private_models(
    profile: gr.OAuthProfile | None,
    oauth_token: gr.OAuthToken | None
) -> str:
    """Return a markdown listing of the logged-in user's Hub models.

    Args:
        profile: OAuth profile of the logged-in user (None when logged out).
        oauth_token: OAuth token used to authorize the Hub API call.

    Returns:
        A human-readable markdown string. Errors are reported inline rather
        than raised so the UI can display them directly.
    """
    if profile is None or oauth_token is None:
        return "Please log in to see your models."
    try:
        models = [
            f"{m.id} ({'private' if m.private else 'public'})"
            for m in list_models(author=profile.username, token=oauth_token.token)
        ]
        if not models:
            return "No models found."
        # Bug fix: the original joined with "\n - ", which left the first
        # entry without a bullet; prefix every entry consistently.
        return "Models:\n\n- " + "\n- ".join(models)
    except Exception as e:
        return f"Error listing models: {e}"
def create_space_action(repo_name: str, sdk: str, profile: gr.OAuthProfile, token: gr.OAuthToken):
    """Create (or reuse) a Hugging Face Space for the logged-in user.

    Returns a ``(repo_id, iframe_html)`` pair: the full ``user/name`` repo id
    and an HTML iframe snippet previewing the Space. Raises RuntimeError on
    any creation failure.
    """
    repo_id = f"{profile.username}/{repo_name}"
    try:
        # exist_ok=True makes the call idempotent if the Space already exists.
        create_repo(
            repo_id=repo_id,
            token=token.token,
            exist_ok=True,
            repo_type="space",
            space_sdk=sdk
        )
    except Exception as e:
        raise RuntimeError(f"Failed to create Space {repo_id}: {e}")
    space_url = f"https://huggingface.co/spaces/{repo_id}"
    embed = f'<iframe src="{space_url}" width="100%" height="500px"></iframe>'
    return repo_id, embed
def upload_file_to_space_action(
    file_obj,
    path_in_repo: str,
    repo_id: str,
    profile: gr.OAuthProfile,
    token: gr.OAuthToken
) -> None:
    """Upload one file-like object to *path_in_repo* inside a Space repo.

    Raises:
        ValueError: when profile, token, or repo_id is missing.
        RuntimeError: when the Hub upload itself fails.
    """
    have_prereqs = profile and token and repo_id
    if not have_prereqs:
        raise ValueError("Hugging Face profile, token, or repo_id is missing.")
    try:
        upload_file(
            path_or_fileobj=file_obj,
            path_in_repo=path_in_repo,
            repo_id=repo_id,
            token=token.token,
            repo_type="space"
        )
    except Exception as e:
        raise RuntimeError(f"Failed to upload `{path_in_repo}` to {repo_id}: {e}")
def _fetch_space_logs_level(repo_id: str, level: str, token: str) -> str:
if not repo_id or not token:
return f"Cannot fetch {level} logs: repo_id or token missing."
jwt_url = f"{constants.ENDPOINT}/api/spaces/{repo_id}/jwt"
try:
r = get_session().get(jwt_url, headers=build_hf_headers(token=token))
hf_raise_for_status(r)
jwt = r.json()["token"]
logs_url = f"https://api.hf.space/v1/{repo_id}/logs/{level}"
lines, count = [], 0
with get_session().get(logs_url, headers=build_hf_headers(token=jwt), stream=True, timeout=30) as resp:
hf_raise_for_status(resp)
for raw in resp.iter_lines():
if count >= 200:
lines.append("... truncated ...")
break
if not raw.startswith(b"data: "):
continue
payload = raw[len(b"data: "):]
try:
event = json.loads(payload.decode())
ts = event.get("timestamp", "")
txt = event.get("data", "").strip()
if txt:
lines.append(f"[{ts}] {txt}")
count += 1
except json.JSONDecodeError:
continue
return "\n".join(lines) if lines else f"No {level} logs found."
except Exception as e:
return f"Error fetching {level} logs: {e}"
def get_build_logs_action(repo_id, profile, token):
    """Fetch build-stage logs for the Space after a short settle delay."""
    ready = repo_id and profile and token
    if not ready:
        return "⚠️ Cannot fetch build logs: log in and create a Space first."
    # Give the Hub a moment to register the build before polling its logs.
    time.sleep(5)
    return _fetch_space_logs_level(repo_id, "build", token.token)
def get_container_logs_action(repo_id, profile, token):
    """Fetch runtime (container) logs for the Space after a settle delay."""
    ready = repo_id and profile and token
    if not ready:
        return "⚠️ Cannot fetch container logs: log in and create a Space first."
    # Containers take longer than builds to come up; wait a bit longer.
    time.sleep(10)
    return _fetch_space_logs_level(repo_id, "run", token.token)
# --- Google Gemini integration with model selection ---
def configure_gemini(api_key: str | None, model_name: str | None) -> str:
    """Validate Gemini credentials by issuing a tiny test generation.

    Returns a status string for the UI; never raises.
    """
    if not api_key:
        return "Gemini API key is not set."
    if not model_name:
        return "Please select a Gemini model."
    try:
        genai.configure(api_key=api_key)
        # A throwaway "ping" call verifies both the key and the model name.
        genai.GenerativeModel(model_name).generate_content("ping", stream=False)
    except Exception as e:
        return f"Error configuring Gemini: {e}"
    return f"Gemini configured successfully with **{model_name}**."
def call_gemini(prompt: str, api_key: str, model_name: str) -> str:
    """Send a single prompt to Gemini and return the response text.

    Raises:
        ValueError: when the key or model is missing.
        RuntimeError: when the API call (or reading its response) fails.
    """
    if not api_key or not model_name:
        raise ValueError("Gemini API key or model not set.")
    try:
        genai.configure(api_key=api_key)
        reply = genai.GenerativeModel(model_name).generate_content(prompt, stream=False)
        # Reading .text stays inside the try: it can raise for blocked
        # responses and must be wrapped the same way as the call itself.
        return reply.text or ""
    except Exception as e:
        raise RuntimeError(f"Gemini API call failed: {e}")
# --- AI workflow logic (State Machine) ---
# Define States
# Each constant names one step of the linear create → generate → upload →
# check-logs → debug workflow driven by ai_workflow_chat. The current value
# lives in a gr.State and generally advances one step per "Send" click.
STATE_IDLE = "idle"
STATE_AWAITING_REPO_NAME = "awaiting_repo_name"
STATE_CREATING_SPACE = "creating_space"
STATE_GENERATING_CODE = "generating_code"
STATE_UPLOADING_APP_PY = "uploading_app_py"
STATE_GENERATING_REQUIREMENTS = "generating_requirements"
STATE_UPLOADING_REQUIREMENTS = "uploading_requirements"
STATE_GENERATING_README = "generating_readme"
STATE_UPLOADING_README = "uploading_readme"
STATE_CHECKING_LOGS_BUILD = "checking_logs_build"
STATE_CHECKING_LOGS_RUN = "checking_logs_run"
STATE_DEBUGGING_CODE = "debugging_code"
STATE_UPLOADING_FIXED_APP_PY = "uploading_fixed_app_py"
STATE_COMPLETE = "complete"
# Upper bound on Gemini-driven fix/re-upload cycles before giving up.
MAX_DEBUG_ATTEMPTS = 3
def add_bot_message(history: list[dict], bot_message: str) -> list[dict]:
    """Append *bot_message* as an assistant turn and return the history.

    Mutates *history* in place; the return value is the same list, which
    keeps call sites chainable (``history = add_bot_message(history, ...)``).
    """
    entry = {"role": "assistant", "content": bot_message}
    history.append(entry)
    return history
def ai_workflow_chat(
    message: str,
    history: list[dict], # Chatbot history format changed to list[dict]
    hf_profile: gr.OAuthProfile | None,
    hf_token: gr.OAuthToken | None,
    gemini_api_key: str | None,
    gemini_model: str | None,
    repo_id_state: str | None,
    workflow_state: str,
    space_sdk: str,
    preview_html: str,
    container_logs: str,
    build_logs: str,
    debug_attempts_state: int,
    app_description_state: str | None,
    repo_name_state: str | None,
    generated_code_state: str | None,
) -> tuple[
    list[dict], # history
    str | None, # repo_id
    str, # workflow_state
    str, # preview_html
    str, # container_logs
    str, # build_logs
    int, # debug_attempts_state
    str | None, # app_description_state
    str | None, # repo_name_state
    str | None, # generated_code_state
]:
    """Drive one step of the Space-building state machine.

    Generator used as the Send-button handler. It appends the user message
    to the chat history, then executes the branch matching the current
    workflow state, yielding the full 10-tuple of updated UI/state values
    after every user-visible change. Most states perform one action and
    transition, then wait for the next 'Send' click to continue.

    NOTE(review): this body's indentation was reconstructed from a
    whitespace-mangled source; statement order is unchanged but nesting of
    a few lines (flagged below) should be confirmed against the original.
    """
    # Unpack state variables into locals; the locals are what gets yielded.
    repo_id = repo_id_state
    state = workflow_state
    attempts = debug_attempts_state
    app_desc = app_description_state
    repo_name = repo_name_state
    generated_code = generated_code_state
    updated_preview = preview_html
    updated_build = build_logs
    updated_run = container_logs
    # Add user message to history in the new format
    user_message_entry = {"role": "user", "content": message}
    if hf_profile and hf_profile.username:
        user_message_entry["name"] = hf_profile.username
    history.append(user_message_entry)
    # Yield immediately to show user message - this is the first yield
    yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
    try:
        # --- State Machine Logic ---
        if state == STATE_IDLE:
            # Check prerequisites first
            if not (hf_profile and hf_token):
                history = add_bot_message(history, "Please log in to Hugging Face first.")
                yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
                return
            if not (gemini_api_key and gemini_model):
                history = add_bot_message(history, "Please enter your API key and select a Gemini model.")
                yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
                return
            # Look for commands
            reset_match = "reset" in message.lower()
            generate_match = re.search(r'generate (?:me )?(?:a|an) \w+ app called (\w+)', message, re.I)
            create_match = re.search(r'create (?:a|an)? space called (\w+)', message, re.I)
            if reset_match:
                # Hard reset: clear every piece of workflow state.
                history = add_bot_message(history, "Workflow reset.")
                yield history, None, STATE_IDLE, "<p>No Space created yet.</p>", "", "", 0, None, None, None
                return
            elif generate_match:
                # "generate me a <sdk> app called <name>" — keep the whole
                # message as the app description for the code prompt.
                new_repo_name = generate_match.group(1)
                new_app_desc = message
                history = add_bot_message(history, f"Acknowledged: '{message}'. Starting workflow to create Space `{hf_profile.username}/{new_repo_name}`.")
                state = STATE_CREATING_SPACE
                repo_name = new_repo_name
                app_desc = new_app_desc
                # No return: fall through to the second if-chain below so the
                # Space is created within this same call.
                yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
            elif create_match:
                new_repo_name = create_match.group(1)
                history = add_bot_message(history, f"Acknowledged: '{message}'. Starting workflow to create Space `{hf_profile.username}/{new_repo_name}`.")
                state = STATE_CREATING_SPACE
                repo_name = new_repo_name
                yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
            elif "create" in message.lower() and not repo_id:
                # Bare "create": ask for a name on the next turn.
                history = add_bot_message(history, "Okay, what should the Space be called? (e.g., `my-awesome-app`)")
                state = STATE_AWAITING_REPO_NAME
                yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
            else:
                history = add_bot_message(history, "Command not recognized. Try 'generate me a gradio app called myapp', or 'reset'.")
                yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
                return
        # Second dispatch chain: deliberately `if`, not `elif`, so a state
        # set above (e.g. STATE_CREATING_SPACE) runs in this same call.
        if state == STATE_AWAITING_REPO_NAME:
            new_repo_name = message.strip()
            if not new_repo_name or re.search(r'[^a-zA-Z0-9_-]', new_repo_name): # More robust validation
                history = add_bot_message(history, "Invalid name. Please provide a single word/slug for the Space name (letters, numbers, underscores, hyphens only).")
                yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
            else:
                history = add_bot_message(history, f"Using Space name `{new_repo_name}`. Creating Space `{hf_profile.username}/{new_repo_name}`...")
                state = STATE_CREATING_SPACE
                repo_name = new_repo_name
                yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
        elif state == STATE_CREATING_SPACE:
            if not repo_name:
                history = add_bot_message(history, "Internal error: Repo name missing for creation. Resetting.")
                yield history, None, STATE_IDLE, "<p>Error creating space.</p>", "", "", 0, None, None, None
                return
            try:
                new_repo_id, iframe_html = create_space_action(repo_name, space_sdk, hf_profile, hf_token)
                updated_preview = iframe_html
                repo_id = new_repo_id
                history = add_bot_message(history, f"✅ Space `{repo_id}` created. Click 'Send' to generate and upload code.")
                state = STATE_GENERATING_CODE
                yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
            except Exception as e:
                history = add_bot_message(history, f"❌ Error creating space: {e}. Click 'reset'.")
                yield history, None, STATE_IDLE, "<p>Error creating space.</p>", "", "", 0, None, None, None
        elif state == STATE_GENERATING_CODE:
            prompt_desc = app_desc if app_desc else 'a Gradio image-blur test app with upload and slider controls'
            prompt = f"""
You are an AI assistant specializing in Hugging Face Spaces using the {space_sdk} SDK.
Generate a full, single-file Python app based on:
'{prompt_desc}'
Return **only** the python code block for app.py. Do not include any extra text, explanations, or markdown outside the code block.
"""
            try:
                history = add_bot_message(history, "🧠 Generating `app.py` code with Gemini...")
                yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
                code = call_gemini(prompt, gemini_api_key, gemini_model)
                code = code.strip()
                # Strip a markdown code fence if the model added one anyway.
                if code.startswith("```python"):
                    code = code[len("```python"):].strip()
                if code.endswith("```"):
                    code = code[:-len("```")].strip()
                if not code:
                    raise ValueError("Gemini returned empty code.")
                history = add_bot_message(history, "✅ `app.py` code generated. Click 'Send' to upload.")
                state = STATE_UPLOADING_APP_PY
                generated_code = code
                yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
            except Exception as e:
                history = add_bot_message(history, f"❌ Error generating code: {e}. Click 'reset'.")
                yield history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0, None, None, None
        elif state == STATE_UPLOADING_APP_PY:
            if not generated_code:
                history = add_bot_message(history, "Internal error: No code to upload. Resetting.")
                yield history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0, None, None, None
                return
            history = add_bot_message(history, "☁️ Uploading `app.py`...")
            yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
            try:
                upload_file_to_space_action(io.StringIO(generated_code), "app.py", repo_id, hf_profile, hf_token)
                history = add_bot_message(history, "✅ Uploaded `app.py`. Click 'Send' to generate requirements.")
                state = STATE_GENERATING_REQUIREMENTS
                # generated_code is a scratch slot reused between states;
                # clear it after a successful upload.
                generated_code = None
                yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
            except Exception as e:
                history = add_bot_message(history, f"❌ Error uploading app.py: {e}. Click 'reset'.")
                yield history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0, None, None, None
        elif state == STATE_GENERATING_REQUIREMENTS:
            history = add_bot_message(history, "📄 Generating `requirements.txt`...")
            yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
            # Keyword-based dependency guessing from the app description.
            reqs_list = ["gradio"] if space_sdk == "gradio" else ["streamlit"]
            if "google.generativeai" in str(app_desc).lower() or "gemini" in str(app_desc).lower() or gemini_api_key:
                reqs_list.append("google-generativeai")
            if "requests" in str(app_desc).lower():
                reqs_list.append("requests")
            # NOTE(review): reconstructed as unconditional (original nesting
            # lost); confirm huggingface_hub should always be included.
            reqs_list.append("huggingface_hub")
            if "image" in str(app_desc).lower() or "upload" in str(app_desc).lower() or "blur" in str(app_desc).lower() or "vision" in str(app_desc).lower(): # Added vision
                reqs_list.append("Pillow")
            if "numpy" in str(app_desc).lower(): reqs_list.append("numpy")
            if "pandas" in str(app_desc).lower(): reqs_list.append("pandas")
            if "scikit-image" in str(app_desc).lower() or "skimage" in str(app_desc).lower() or "cv2" in str(app_desc).lower() or "opencv-python" in str(app_desc).lower(): # Common image processing deps
                reqs_list.append("scikit-image") # skimage
                reqs_list.append("opencv-python") # cv2
            reqs_list = list(dict.fromkeys(reqs_list)) # Remove duplicates
            reqs_content = "\n".join(reqs_list) + "\n"
            history = add_bot_message(history, "✅ `requirements.txt` generated. Click 'Send' to upload.")
            state = STATE_UPLOADING_REQUIREMENTS
            generated_code = reqs_content
            yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
        elif state == STATE_UPLOADING_REQUIREMENTS:
            reqs_content_to_upload = generated_code
            if not reqs_content_to_upload:
                history = add_bot_message(history, "Internal error: No requirements content to upload. Resetting.")
                yield history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0, None, None, None
                return
            history = add_bot_message(history, "☁️ Uploading `requirements.txt`...")
            yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
            try:
                upload_file_to_space_action(io.StringIO(reqs_content_to_upload), "requirements.txt", repo_id, hf_profile, hf_token)
                history = add_bot_message(history, "✅ Uploaded `requirements.txt`. Click 'Send' to generate README.")
                state = STATE_GENERATING_README
                generated_code = None
                yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
            except Exception as e:
                history = add_bot_message(history, f"❌ Error uploading requirements.txt: {e}. Click 'reset'.")
                yield history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0, None, None, None
        elif state == STATE_GENERATING_README:
            history = add_bot_message(history, "📝 Generating `README.md`...")
            yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
            readme_title = repo_name if repo_name else "My Awesome Space"
            readme_description = app_desc if app_desc else f"This Hugging Face Space hosts an AI-generated {space_sdk} application."
            readme_content = f"# {readme_title}\n\n{readme_description}\n\n" \
                "This Space was automatically generated by an AI workflow.\n\n" \
                f"Built with the {space_sdk} SDK.\n"
            history = add_bot_message(history, "✅ `README.md` generated. Click 'Send' to upload.")
            state = STATE_UPLOADING_README
            generated_code = readme_content
            yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
        elif state == STATE_UPLOADING_README:
            readme_content_to_upload = generated_code
            if not readme_content_to_upload:
                history = add_bot_message(history, "Internal error: No README content to upload. Resetting.")
                yield history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0, None, None, None
                return
            history = add_bot_message(history, "☁️ Uploading `README.md`...")
            yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
            try:
                upload_file_to_space_action(io.StringIO(readme_content_to_upload), "README.md", repo_id, hf_profile, hf_token)
                history = add_bot_message(history, "✅ Uploaded `README.md`. All files uploaded. Space is now building. Click 'Send' to check build logs.")
                state = STATE_CHECKING_LOGS_BUILD
                generated_code = None
                yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
            except Exception as e:
                history = add_bot_message(history, f"❌ Error uploading README.md: {e}. Click 'reset'.")
                yield history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0, None, None, None
        elif state == STATE_CHECKING_LOGS_BUILD:
            history = add_bot_message(history, "🔍 Fetching build logs...")
            yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
            build_logs_text = get_build_logs_action(repo_id, hf_profile, hf_token)
            updated_build = build_logs_text
            # Heuristic error detection: substring match on the log text.
            if "Error" in updated_build or "Exception" in updated_build or "Build failed" in updated_build:
                history = add_bot_message(history, "⚠️ Build logs indicate potential issues. Please inspect above. Click 'Send' to check container logs (app might still start).")
                state = STATE_CHECKING_LOGS_RUN
                yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
            else:
                history = add_bot_message(history, "✅ Build logs fetched. Click 'Send' to check container logs.")
                state = STATE_CHECKING_LOGS_RUN
                yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
        elif state == STATE_CHECKING_LOGS_RUN:
            history = add_bot_message(history, "🔍 Fetching container logs...")
            yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
            container_logs_text = get_container_logs_action(repo_id, hf_profile, hf_token)
            updated_run = container_logs_text
            if ("Error" in updated_run or "Exception" in updated_run) and attempts < MAX_DEBUG_ATTEMPTS:
                attempts += 1
                history = add_bot_message(history, f"❌ Errors detected in container logs. Attempting debug fix #{attempts}/{MAX_DEBUG_ATTEMPTS}. Click 'Send' to proceed.")
                state = STATE_DEBUGGING_CODE
                yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
            elif ("Error" in updated_run or "Exception" in updated_run) and attempts >= MAX_DEBUG_ATTEMPTS:
                history = add_bot_message(history, f"❌ Errors detected in container logs. Max debug attempts ({MAX_DEBUG_ATTEMPTS}) reached. Please inspect logs manually or click 'reset'.")
                state = STATE_COMPLETE
                yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
            else:
                history = add_bot_message(history, "✅ App appears to be running successfully! Check the iframe above. Click 'reset' to start a new project.")
                state = STATE_COMPLETE
                yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
        elif state == STATE_DEBUGGING_CODE:
            history = add_bot_message(history, f"🧠 Calling Gemini to generate fix based on logs...")
            yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
            debug_prompt = f"""
You are debugging a {space_sdk} Space. The goal is to fix the code in `app.py` based on the container logs provided.
Here are the container logs:
{updated_run}
Generate the *complete, fixed* content for `app.py` based on these logs.
Return **only** the python code block for app.py. Do not include any extra text, explanations, or markdown outside the code block.
"""
            try:
                fix_code = call_gemini(debug_prompt, gemini_api_key, gemini_model)
                fix_code = fix_code.strip()
                if fix_code.startswith("```python"):
                    fix_code = fix_code[len("```python"):].strip()
                if fix_code.endswith("```"):
                    fix_code = fix_code[:-len("```")].strip()
                if not fix_code:
                    raise ValueError("Gemini returned empty fix code.")
                history = add_bot_message(history, "✅ Fix code generated. Click 'Send' to upload.")
                state = STATE_UPLOADING_FIXED_APP_PY
                generated_code = fix_code
                yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
            except Exception as e:
                history = add_bot_message(history, f"❌ Error generating debug code: {e}. Click 'reset'.")
                yield history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0, None, None, None
        elif state == STATE_UPLOADING_FIXED_APP_PY:
            fixed_code_to_upload = generated_code
            if not fixed_code_to_upload:
                history = add_bot_message(history, "Internal error: No fixed code available to upload. Resetting.")
                yield history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0, None, None, None
                return
            history = add_bot_message(history, "☁️ Uploading fixed `app.py`...")
            yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
            try:
                upload_file_to_space_action(io.StringIO(fixed_code_to_upload), "app.py", repo_id, hf_profile, hf_token)
                history = add_bot_message(history, "✅ Fixed `app.py` uploaded. Space will rebuild. Click 'Send' to check logs again.")
                # Loop back to log checking; attempts caps the retries.
                state = STATE_CHECKING_LOGS_RUN
                generated_code = None
                yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
            except Exception as e:
                history = add_bot_message(history, f"❌ Error uploading fixed app.py: {e}. Click 'reset'.")
                yield history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0, None, None, None
        elif state == STATE_COMPLETE:
            # Workflow finished. Stay in this state until reset.
            # The message is set in the state that transitioned to COMPLETE.
            pass # No further action needed in this state
    except Exception as e:
        # Catch-all for unexpected exceptions in any state
        error_message = f"Workflow step failed unexpectedly ({state}): {e}. Click 'Send' to re-attempt this step or 'reset'."
        history = add_bot_message(history, error_message)
        print(f"Critical Error in state {state}: {e}")
        # Transition to idle state on unexpected errors
        yield history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0, None, None, None
        return # End generator
    # Note: Each state block should end with a yield to update the UI/state.
    # If a state block finishes without yielding, the generator will stop until the next call.
    # By yielding the state variables *after* adding the user message, and then yielding
    # again after state transitions/actions *within* the state logic, we ensure updates.
    # The final yield after the try/except is removed as yields should happen within the state logic.
# --- Build the Gradio UI ---
# Layout: a sidebar (login, Gemini key/model, SDK choice, status readouts)
# and a main column (chat, send button, Space preview iframe, log panes).
# All workflow state lives in gr.State objects threaded through the
# ai_workflow_chat generator above.
with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
    # State variables
    hf_profile = gr.State(None)
    hf_token = gr.State(None)
    gemini_key = gr.State(None)
    gemini_model = gr.State("gemini-1.5-flash")
    repo_id = gr.State(None)
    workflow = gr.State(STATE_IDLE)
    sdk_state = gr.State("gradio")
    debug_attempts = gr.State(0)
    app_description = gr.State(None)
    repo_name_state = gr.State(None)
    generated_code_state = gr.State(None) # Reused
    with gr.Row():
        # Sidebar
        with gr.Column(scale=1, min_width=300):
            gr.Markdown("## Hugging Face Login")
            login_status = gr.Markdown("*Not logged in.*")
            login_btn = gr.LoginButton(variant="huggingface")
            # Initial load to check login status
            # Removed _preprocess=False
            ai_builder_tab.load(show_profile, outputs=login_status)
            # Update status on login click
            login_btn.click(show_profile, outputs=login_status)
            # Store profile and token in state on login click
            # NOTE(review): lambda x: x with a single input feeding two
            # outputs relies on Gradio's OAuth component expansion — confirm
            # this still populates both states on the installed version.
            login_btn.click(lambda x: x, inputs=[login_btn], outputs=[hf_profile, hf_token])
            gr.Markdown("## Google AI Studio API Key")
            gemini_input = gr.Textbox(label="API Key", type="password", interactive=True)
            gemini_status = gr.Markdown("")
            gemini_input.change(lambda k: k, inputs=gemini_input, outputs=gemini_key)
            gr.Markdown("## Gemini Model")
            model_selector = gr.Radio(
                choices=[
                    ("Gemini 1.5 Flash", "gemini-1.5-flash"),
                    ("Gemini 1.5 Pro", "gemini-1.5-pro"),
                    ("Gemini 1.0 Pro", "gemini-1.0-pro"),
                ],
                value="gemini-1.5-flash",
                label="Select model",
                interactive=True
            )
            model_selector.change(lambda m: m, inputs=model_selector, outputs=gemini_model)
            # Configure Gemini status on load and when key/model changes
            # Removed _preprocess=False from load
            ai_builder_tab.load(
                configure_gemini,
                inputs=[gemini_key, gemini_model],
                outputs=[gemini_status]
            )
            # Keep _preprocess=False on change events as they have implicit outputs (the new value)
            # NOTE(review): `_preprocess` is an underscored (private) Gradio
            # kwarg — confirm it is still accepted by the installed version.
            gemini_input.change(
                configure_gemini,
                inputs=[gemini_key, gemini_model],
                outputs=[gemini_status],
                _preprocess=False
            )
            model_selector.change(
                configure_gemini,
                inputs=[gemini_key, gemini_model],
                outputs=[gemini_status],
                _preprocess=False
            )
            gr.Markdown("## Space SDK")
            sdk_selector = gr.Radio(choices=["gradio","streamlit"], value="gradio", label="Template SDK", interactive=True)
            sdk_selector.change(lambda s: s, inputs=sdk_selector, outputs=sdk_state)
            gr.Markdown("## Workflow Status")
            status_text = gr.Textbox(label="Current State", value=STATE_IDLE, interactive=False)
            repo_id_text = gr.Textbox(label="Current Space ID", value="None", interactive=False)
        # Main content
        with gr.Column(scale=3):
            chatbot = gr.Chatbot(type='messages', label="AI Workflow Chat")
            user_input = gr.Textbox(placeholder="Type your message…", interactive=True)
            send_btn = gr.Button("Send", interactive=False)
            # Logic to enable send button only when logged in and API key is set
            def update_send_button_state(profile: gr.OAuthProfile | None, token: gr.OAuthToken | None, key: str | None, model: str | None):
                # Send is usable only once both HF auth and Gemini config exist.
                is_logged_in = profile is not None and token is not None
                is_gemini_ready = key is not None and model is not None
                return gr.update(interactive=is_logged_in and is_gemini_ready)
            # Removed _preprocess=False from load
            ai_builder_tab.load(
                update_send_button_state,
                inputs=[hf_profile, hf_token, gemini_key, gemini_model],
                outputs=[send_btn]
            )
            # Keep _preprocess=False on click/change events with explicit inputs
            login_btn.click(
                update_send_button_state,
                inputs=[hf_profile, hf_token, gemini_key, gemini_model],
                outputs=[send_btn],
                _preprocess=False
            )
            gemini_input.change(
                update_send_button_state,
                inputs=[hf_profile, hf_token, gemini_key, gemini_model],
                outputs=[send_btn],
                _preprocess=False
            )
            model_selector.change(
                update_send_button_state,
                inputs=[hf_profile, hf_token, gemini_key, gemini_model],
                outputs=[send_btn],
                _preprocess=False
            )
            iframe = gr.HTML("<p>No Space created yet.</p>")
            build_txt = gr.Textbox(label="Build Logs", lines=10, interactive=False, value="")
            run_txt = gr.Textbox(label="Container Logs", lines=10, interactive=False, value="")
            # The main event handler for the Send button
            send_btn.click(
                ai_workflow_chat,
                inputs=[
                    user_input, chatbot,
                    hf_profile, hf_token,
                    gemini_key, gemini_model,
                    repo_id, workflow, sdk_state,
                    iframe, run_txt, build_txt,
                    debug_attempts, app_description, repo_name_state, generated_code_state
                ],
                outputs=[
                    chatbot,
                    repo_id, workflow,
                    iframe, run_txt, build_txt,
                    debug_attempts, app_description, repo_name_state, generated_code_state
                ]
            ).success( # Clear input after successful send
                lambda: gr.update(value=""),
                inputs=None,
                outputs=user_input
            )
    # Link state variables to UI status displays (reactive updates)
    workflow.change(lambda s: s, inputs=workflow, outputs=status_text)
    repo_id.change(lambda r: r if r else "None", inputs=repo_id, outputs=repo_id_text)
    # Add an initial message to the chatbot on load
    def greet():
        """Return the initial assistant greeting shown in the chat pane."""
        return [{"role": "assistant", "content": "Welcome! Please log in to Hugging Face and provide your Google AI Studio API key to start building Spaces. Once ready, type 'generate me a gradio app called myapp' or 'create' to begin."}]
    # THIS CALL IS INSIDE the with gr.Blocks() block
    ai_builder_tab.load(greet, outputs=chatbot)
if __name__ == "__main__":
    # Optional: Configure retries for huggingface_hub requests
    # from requests.adapters import HTTPAdapter
    # from urllib3.util.retry import Retry
    # retry_strategy = Retry(total=5, backoff_factor=1, status_forcelist=[429, 500, 502, 503, 504])
    # adapter = HTTPAdapter(max_retries=retry_strategy)
    # get_session().mount("http://", adapter)
    # get_session().mount("https://", adapter)
    # Launch the Gradio app (blocks until the server is stopped).
    ai_builder_tab.launch()