# app.py
import os
import io
import time
import json
import requests
import gradio as gr
import google.generativeai as genai
from huggingface_hub import create_repo, list_models, upload_file, constants
from huggingface_hub.utils import build_hf_headers, get_session, hf_raise_for_status

# --- Helper functions for Hugging Face integration ---


def show_profile(profile: gr.OAuthProfile | None) -> str:
    """Render a markdown login-status line for the given OAuth profile."""
    if profile is None:
        return "*Not logged in.*"
    return f"✅ Logged in as **{profile.username}**"


def list_private_models(profile: gr.OAuthProfile | None) -> str:
    """List the logged-in user's models (public and private) as markdown text."""
    if profile is None:
        return "Please log in to see your models."
    try:
        # NOTE(review): relies on the private `_token` attribute of the OAuth
        # profile; guarded with getattr so a gradio version without it degrades
        # to an unauthenticated listing instead of raising — confirm intent.
        token_obj = getattr(profile, "_token", None)
        models = [
            f"{m.id} ({'private' if m.private else 'public'})"
            for m in list_models(
                author=profile.username,
                token=token_obj.token if token_obj else None,
            )
        ]
        return "No models found." if not models else "Models:\n\n" + "\n - ".join(models)
    except Exception as e:
        return f"Error listing models: {e}"


def create_space_action(repo_name: str, sdk: str, profile: gr.OAuthProfile, token: gr.OAuthToken):
    """Create (or reuse) the Space `<username>/<repo_name>`.

    Returns:
        (repo_id, iframe_html): the full repo id and an HTML snippet
        embedding the Space page for preview.
    """
    repo_id = f"{profile.username}/{repo_name}"
    create_repo(
        repo_id=repo_id,
        token=token.token,
        exist_ok=True,          # idempotent: re-running with the same name is fine
        repo_type="space",
        space_sdk=sdk,
    )
    url = f"https://huggingface.co/spaces/{repo_id}"
    # NOTE(review): the iframe markup was lost in the original source (empty
    # f-string); reconstructed as a plain embed — confirm desired dimensions.
    iframe = f'<iframe src="{url}" width="100%" height="500px"></iframe>'
    return repo_id, iframe


def upload_file_to_space_action(
    file_obj,
    path_in_repo: str,
    repo_id: str,
    profile: gr.OAuthProfile,
    token: gr.OAuthToken,
) -> str:
    """Upload `file_obj` (a path, file-like object, or raw source string) into the Space.

    Returns a human-readable status string rather than raising, so it can be
    shown directly in the chat/log widgets.
    """
    if not (profile and token and repo_id):
        return "⚠️ Please log in and create a Space first."
    try:
        # The AI workflow passes generated source code as a plain string;
        # `upload_file` would interpret a str as a filesystem path, so wrap
        # raw content (anything that is not an existing path) in a buffer.
        payload = file_obj
        if isinstance(file_obj, str) and not os.path.exists(file_obj):
            payload = io.BytesIO(file_obj.encode("utf-8"))
        upload_file(
            path_or_fileobj=payload,
            path_in_repo=path_in_repo,
            repo_id=repo_id,
            token=token.token,
            repo_type="space",
        )
        return f"✅ Uploaded `{path_in_repo}`"
    except Exception as e:
        return f"Error uploading file: {e}"


def _fetch_space_logs_level(repo_id: str, level: str, token: str) -> str:
    """Stream up to 200 log lines of the given level ('build' or 'run') for a Space.

    First exchanges the user token for a Space-scoped JWT, then reads the
    server-sent-events log endpoint until 200 lines or end of stream.
    """
    jwt_url = f"{constants.ENDPOINT}/api/spaces/{repo_id}/jwt"
    r = get_session().get(jwt_url, headers=build_hf_headers(token=token))
    hf_raise_for_status(r)
    jwt = r.json()["token"]

    logs_url = f"https://api.hf.space/v1/{repo_id}/logs/{level}"
    lines, count = [], 0
    with get_session().get(
        logs_url, headers=build_hf_headers(token=jwt), stream=True, timeout=20
    ) as resp:
        hf_raise_for_status(resp)
        for raw in resp.iter_lines():
            if count >= 200:
                lines.append("... truncated ...")
                break
            # SSE framing: only `data: {...}` payload lines carry log events.
            if not raw.startswith(b"data: "):
                continue
            payload = raw[len(b"data: "):]
            try:
                event = json.loads(payload.decode())
                ts = event.get("timestamp", "")
                txt = event.get("data", "").strip()
                if txt:
                    lines.append(f"[{ts}] {txt}")
                    count += 1
            except json.JSONDecodeError:
                # Skip malformed events rather than aborting the whole fetch.
                continue
    return "\n".join(lines) if lines else f"No {level} logs found."


def get_build_logs_action(repo_id, profile, token):
    """Fetch the Space's build logs; requires login and an existing Space."""
    if not (repo_id and profile and token):
        return "⚠️ Please log in and create a Space first."
    return _fetch_space_logs_level(repo_id, "build", token.token)


def get_container_logs_action(repo_id, profile, token):
    """Fetch the Space's runtime (container) logs; requires login and an existing Space."""
    if not (repo_id and profile and token):
        return "⚠️ Please log in and create a Space first."
    return _fetch_space_logs_level(repo_id, "run", token.token)


# --- Google Gemini integration ---


def configure_gemini(api_key: str | None) -> str:
    """Validate the Gemini API key with a tiny round-trip generation."""
    if not api_key:
        return "Gemini API key is not set."
    try:
        genai.configure(api_key=api_key)
        # Cheap test call: proves the key is actually usable, not just non-empty.
        genai.GenerativeModel("gemini-pro").generate_content("ping")
        return "Gemini configured successfully."
    except Exception as e:
        return f"Error configuring Gemini: {e}. Please check your API key."
def call_gemini(prompt: str, api_key: str) -> str:
    """Send a single prompt to Gemini and return its text (or an error string)."""
    if not api_key:
        return "Error: Gemini API key not provided."
    try:
        genai.configure(api_key=api_key)
        model = genai.GenerativeModel("gemini-pro")
        response = model.generate_content(prompt)
        return response.text or "Gemini returned an empty response."
    except Exception as e:
        return f"Error calling Gemini API: {e}"


# --- AI workflow logic ---


def ai_workflow_chat(
    message: str,
    history: list[list[str | None]],
    hf_profile: gr.OAuthProfile | None,
    hf_token: gr.OAuthToken | None,
    gemini_api_key: str | None,
    repo_id_state: str | None,
    workflow_state: str,
    space_sdk: str,
    preview_html: str,
    container_logs: str,
    build_logs: str,
) -> tuple[list[list[str | None]], str | None, str, str, str, str]:
    """Drive the chat-based Space-building state machine for one user message.

    Returns the updated (history, repo_id, workflow, preview_html,
    container_logs, build_logs) tuple for the Gradio state/output widgets.
    """
    history.append([message, None])
    bot_message = ""
    new_repo_id = repo_id_state
    new_workflow = workflow_state
    updated_preview = preview_html
    updated_container = container_logs
    updated_build = build_logs
    try:
        # Preliminary checks
        if not hf_profile or not hf_token:
            bot_message = "Please log in to Hugging Face first."
            new_workflow = "awaiting_login"
        elif not gemini_api_key:
            bot_message = "Please enter your Google AI Studio API key."
            new_workflow = "awaiting_api_key"
        # Starting a new Space
        elif (new_workflow == "idle" or "create" in message.lower()) and not new_repo_id:
            bot_message = "What should the Space be called? (e.g., `my-awesome-app`)"
            new_workflow = "awaiting_repo_name"
        # User provides a repo name
        elif new_workflow == "awaiting_repo_name":
            repo_name = message.strip()
            if not repo_name:
                bot_message = "Please provide a valid Space name."
            else:
                bot_message = f"Creating Space `{hf_profile.username}/{repo_name}`..."
                new_repo_id, iframe_html = create_space_action(
                    repo_name, space_sdk, hf_profile, hf_token
                )
                updated_preview = iframe_html
                bot_message += "\n✅ Space created."
                new_workflow = "awaiting_app_description"
        # User describes the app or asks for debugging
        elif new_workflow in ("awaiting_app_description", "debugging"):
            if new_workflow == "awaiting_app_description":
                app_desc = message
                bot_message = f"Generating code for a `{space_sdk}` app based on: '{app_desc}'..."
                prompt = f"""
You are an AI assistant specializing in Hugging Face Spaces using the {space_sdk} SDK.
Generate a full, single-file Python app based on: '{app_desc}'
Return **only** the code block (```python ...```).
"""
            else:  # debugging
                debug_instr = message
                logs = get_container_logs_action(new_repo_id, hf_profile, hf_token)
                bot_message = f"Analyzing logs and applying fixes: '{debug_instr}'..."
                prompt = f"""
You are debugging a {space_sdk} Space.
Logs:
{logs}
User instructions: '{debug_instr}'
Generate a fixed, single-file Python app. Return only the ```python``` code block.
"""
            new_workflow = "generating_code"
            resp = call_gemini(prompt, gemini_api_key)
            # Extract the first ```python fenced block from the response.
            start = resp.find("```python")
            end = resp.rfind("```")
            if start != -1 and end != -1 and end > start:
                code = resp[start + len("```python"):end].strip()
                bot_message += "\n✅ Code generated. Uploading..."
                new_workflow = "uploading_code"
                upload_log = upload_file_to_space_action(
                    code, "app.py", new_repo_id, hf_profile, hf_token
                )
                bot_message += "\n" + upload_log
                if "✅ Uploaded" in upload_log:
                    bot_message += "\nThe Space is now rebuilding. Say 'check logs' to fetch them."
                    new_workflow = "awaiting_log_check"
                    # NOTE(review): iframe markup was lost in the original
                    # source (empty f-string); reconstructed with a timestamp
                    # query param to bust the preview cache — confirm styling.
                    updated_preview = (
                        f'<iframe src="https://huggingface.co/spaces/{new_repo_id}'
                        f'?t={int(time.time())}" width="100%" height="500px"></iframe>'
                    )
                else:
                    new_workflow = "idle"
            else:
                bot_message += f"\n⚠️ Could not parse code from Gemini.\nResponse:\n{resp}"
                new_workflow = "awaiting_app_description"
        # Check logs
        elif new_workflow == "awaiting_log_check" and "check logs" in message.lower():
            bot_message = "Fetching container logs..."
            updated_container = get_container_logs_action(new_repo_id, hf_profile, hf_token)
            updated_build = get_build_logs_action(new_repo_id, hf_profile, hf_token)
            bot_message += "\n✅ Logs updated. Describe any errors or say 'generate fix'."
            new_workflow = "reviewing_logs"
        # Auto-generate fix
        elif new_workflow == "reviewing_logs" and "generate fix" in message.lower():
            latest = get_container_logs_action(new_repo_id, hf_profile, hf_token)
            if "Error" not in latest and "Exception" not in latest:
                bot_message = "No clear error found. What should I fix?"
                new_workflow = "reviewing_logs"
            else:
                bot_message = "Generating a fix based on logs..."
                new_workflow = "debugging"
        # Reset workflow
        elif "reset" in message.lower():
            bot_message = "Workflow reset."
            new_repo_id = None
            updated_preview = "<p>No Space created yet.</p>"
            updated_container = ""
            updated_build = ""
            new_workflow = "idle"
        else:
            bot_message = ("Command not recognized. You can ask to 'create', "
                           "'check logs', 'generate fix', or 'reset'.")
            new_workflow = workflow_state
    except Exception as e:
        bot_message = f"An unexpected error occurred: {e}"
        new_workflow = "idle"

    # Append bot response to the pending user turn (or as a standalone turn).
    if history and history[-1][1] is None:
        history[-1][1] = bot_message
    else:
        history.append([None, bot_message])

    return history, new_repo_id, new_workflow, updated_preview, updated_container, updated_build


# --- Build the Gradio UI ---

with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
    # Session state shared across chat turns.
    hf_profile = gr.State(None)
    hf_token = gr.State(None)
    gemini_key = gr.State(None)
    repo_id = gr.State(None)
    workflow = gr.State("idle")
    sdk_state = gr.State("gradio")

    with gr.Row():
        # Sidebar
        with gr.Column(scale=1, min_width=300):
            gr.Markdown("## Hugging Face Login")
            login_status = gr.Markdown("*Not logged in.*")
            login_btn = gr.LoginButton(variant="huggingface")
            # NOTE(review): this chain hangs off the *logout* event, and the
            # final lambda takes two args while only one input is wired —
            # looks like broken login wiring; preserved as-is, confirm against
            # the gradio OAuth docs before relying on it.
            login_btn.logout(
                lambda: (None, None, "*Not logged in.*"),
                outputs=[hf_profile, hf_token, login_status]
            ).then(
                show_profile, inputs=[hf_profile], outputs=[login_status]
            ).then(
                lambda profile, token: (profile, token),
                inputs=[login_btn],
                outputs=[hf_profile, hf_token]
            )

            gr.Markdown("## Google AI Studio API Key")
            gemini_input = gr.Textbox(label="API Key", type="password")
            gemini_status = gr.Markdown("")
            # Stash the key in state, then validate it with a test call.
            gemini_input.change(
                lambda k: k, inputs=[gemini_input], outputs=[gemini_key]
            ).then(
                configure_gemini, inputs=[gemini_key], outputs=[gemini_status]
            )

            gr.Markdown("## Space SDK")
            sdk_selector = gr.Radio(
                choices=["gradio", "streamlit"], value="gradio", label="Template SDK"
            )
            sdk_selector.change(
                lambda s: s, inputs=[sdk_selector], outputs=[sdk_state]
            )

        # Main content
        with gr.Column(scale=3):
            chatbot = gr.Chatbot()
            user_input = gr.Textbox(placeholder="Type your message…")
            send_btn = gr.Button("Send")
            # NOTE(review): placeholder markup was lost in the original
            # source; reconstructed as a minimal paragraph.
            iframe = gr.HTML("<p>No Space created yet.</p>")
            build_txt = gr.Textbox(label="Build Logs", lines=10, interactive=False)
            run_txt = gr.Textbox(label="Container Logs", lines=10, interactive=False)

            def wrap_chat(msg, history, prof, tok, key, rid, wf, sdk, prev, run_l, build_l):
                """Adapt Chatbot tuples to the list-of-lists shape ai_workflow_chat uses."""
                hist = [[u, v] for u, v in history]
                new_hist, new_rid, new_wf, new_prev, new_run, new_build = ai_workflow_chat(
                    msg, hist, prof, tok, key, rid, wf, sdk, prev, run_l, build_l
                )
                out_hist = [(u or "", v or "") for u, v in new_hist]
                return out_hist, new_rid, new_wf, new_prev, new_run, new_build

            send_btn.click(
                wrap_chat,
                inputs=[
                    user_input, chatbot,
                    hf_profile, hf_token, gemini_key,
                    repo_id, workflow, sdk_state,
                    iframe, run_txt, build_txt
                ],
                outputs=[
                    chatbot, repo_id, workflow,
                    iframe, run_txt, build_txt
                ]
            )

with gr.Blocks(title="Manual Hugging Face Space Manager") as manual_control_tab:
    manual_profile = gr.State(None)
    manual_token = gr.State(None)
    manual_repo = gr.State(None)

    gr.Markdown("## Manual Sign-In & Space Management")
    manual_login_btn = gr.LoginButton(variant="huggingface", size="lg")
    manual_status = gr.Markdown("*Not logged in.*")
    manual_models = gr.Markdown()
    # NOTE(review): same suspect logout-chained wiring as the AI tab —
    # preserved as-is; confirm intended login behavior.
    manual_login_btn.logout(
        lambda: (None, None, "*Not logged in.*", ""),
        outputs=[manual_profile, manual_token, manual_status, manual_repo]
    ).then(
        show_profile, inputs=[manual_profile], outputs=[manual_status]
    ).then(
        lambda profile, token: (profile, token),
        inputs=[manual_login_btn],
        outputs=[manual_profile, manual_token]
    ).then(
        list_private_models, inputs=[manual_profile], outputs=[manual_models]
    )

    manual_repo_name = gr.Textbox(label="New Space name", placeholder="my-space")
    manual_sdk_sel = gr.Radio(
        choices=["gradio", "streamlit"], value="gradio", label="Template SDK"
    )
    manual_create_btn = gr.Button("Create Space", interactive=False)
    manual_create_logs = gr.Textbox(label="Create Logs", lines=3, interactive=False)
    # NOTE(review): placeholder markup lost in the original; reconstructed.
    manual_preview = gr.HTML("<p>No Space created yet.</p>")

    manual_login_btn.click(
        lambda prof: gr.update(interactive=prof is not None),
        inputs=[manual_profile],
        outputs=[manual_create_btn]
    )
    manual_create_btn.click(
        create_space_action,
        inputs=[manual_repo_name, manual_sdk_sel, manual_profile, manual_token],
        outputs=[manual_repo, manual_preview]
    ).then(
        # Fixed: original `lambda x: ""` took one arg but wired no inputs,
        # which would raise at runtime when the event fired.
        lambda: "", outputs=[manual_create_logs]
    )

    # File upload
    manual_path = gr.Textbox(label="Path in Space", value="app.py")
    manual_file = gr.File(label="Select file")
    manual_up_btn = gr.Button("Upload File", interactive=False)
    manual_up_log = gr.Textbox(label="Upload Logs", lines=2, interactive=False)

    manual_repo.change(
        lambda rid, prof: gr.update(interactive=bool(rid and prof)),
        inputs=[manual_repo, manual_profile],
        outputs=[manual_up_btn]
    )
    manual_login_btn.click(
        lambda rid, prof: gr.update(interactive=bool(rid and prof)),
        inputs=[manual_repo, manual_profile],
        outputs=[manual_up_btn]
    )
    manual_up_btn.click(
        upload_file_to_space_action,
        inputs=[manual_file, manual_path, manual_repo, manual_profile, manual_token],
        outputs=[manual_up_log]
    )

    # Logs
    manual_build_btn = gr.Button("Fetch Build Logs", interactive=False)
    manual_container_btn = gr.Button("Fetch Container Logs", interactive=False)
    manual_build_txt = gr.Textbox(label="Build Logs", lines=10, interactive=False)
    manual_container_txt = gr.Textbox(label="Container Logs", lines=10, interactive=False)

    # `outputs=[btn]` binds the current button immediately (no late-binding
    # issue: btn is evaluated when .change/.click is called, not later).
    for btn in (manual_build_btn, manual_container_btn):
        manual_repo.change(
            lambda rid, prof: gr.update(interactive=bool(rid and prof)),
            inputs=[manual_repo, manual_profile],
            outputs=[btn]
        )
        manual_login_btn.click(
            lambda rid, prof: gr.update(interactive=bool(rid and prof)),
            inputs=[manual_repo, manual_profile],
            outputs=[btn]
        )

    manual_build_btn.click(
        get_build_logs_action,
        inputs=[manual_repo, manual_profile, manual_token],
        outputs=[manual_build_txt]
    )
    manual_container_btn.click(
        get_container_logs_action,
        inputs=[manual_repo, manual_profile, manual_token],
        outputs=[manual_container_txt]
    )

demo = gr.TabbedInterface(
    [ai_builder_tab, manual_control_tab],
    ["AI App Builder", "Manual Control"]
)

if __name__ == "__main__":
    demo.launch()