import os
import json
import uuid
from typing import Any, Dict, Tuple

import gradio as gr
from huggingface_hub import create_repo, HfApi
from google import genai
from google.genai import types
from google.genai.types import Tool, GoogleSearch

# -----------------------------------------------------------------------------
# Configuration
# -----------------------------------------------------------------------------
MODEL_ID: str = "gemini-2.5-flash-preview-04-17"
WORKSPACE_DIR: str = "workspace"
SYSTEM_INSTRUCTION: str = (
    "You are a helpful coding assistant that scaffolds a complete Hugging Face Space app. "
    "Based on the user's request, decide between Gradio or Streamlit (whichever fits best), "
    "and respond with exactly one JSON object with keys:\n"
    " • \"framework\": either \"gradio\" or \"streamlit\"\n"
    " • \"files\": a map of relative file paths to file contents\n"
    " • \"message\": a human‑readable summary\n"
    "Do not include extra text or markdown."
)

# In‑memory session store: maps session IDs -> state dicts.
# NOTE(review): this is process-local and unbounded; fine for a single-worker
# demo, but entries are never evicted.
state_store: Dict[str, Dict[str, Any]] = {}

# Placeholder markup for the preview pane. The original HTML was lost to
# text mangling; reconstructed as a simple centered notice — adjust styling
# as desired.
_EMPTY_PREVIEW_HTML: str = (
    "<div style='text-align:center;color:grey;padding:2em;'>No deployed app yet.</div>"
)
_AWAITING_PREVIEW_HTML: str = (
    "<div style='text-align:center;color:grey;padding:2em;'>Awaiting first instruction…</div>"
)


# -----------------------------------------------------------------------------
# Helper functions
# -----------------------------------------------------------------------------
def start_app(gemini_key: str, hf_token: str, hf_username: str, repo_name: str) -> Dict[str, Any]:
    """Initialise a Gemini chat session and a local workspace for the new app.

    Args:
        gemini_key: API key for the Gemini (google-genai) client.
        hf_token:   Hugging Face access token (from OAuth) used for repo ops.
        hf_username: HF account that will own the new Space.
        repo_name:  Name of the Space repo (also the local folder name).

    Returns:
        A fresh session-state dict consumed by ``handle_message``.
    """
    os.makedirs(WORKSPACE_DIR, exist_ok=True)

    # Gemini client & chat session.
    # Fix: tools must be supplied inside GenerateContentConfig — the
    # google-genai SDK's chats.create() does not accept a separate `tools`
    # keyword argument.
    client = genai.Client(api_key=gemini_key)
    config = types.GenerateContentConfig(
        system_instruction=SYSTEM_INSTRUCTION,
        tools=[Tool(google_search=GoogleSearch())],
    )
    chat = client.chats.create(model=MODEL_ID, config=config)

    # Local project folder (used as upload snapshot source).
    local_path = os.path.join(WORKSPACE_DIR, repo_name)
    os.makedirs(local_path, exist_ok=True)

    return {
        "chat": chat,
        "hf_token": hf_token,
        "hf_username": hf_username,
        "repo_name": repo_name,
        "created": False,          # becomes True once the HF Space exists
        "repo_id": None,           # "<user>/<repo>" after creation
        "local_path": local_path,
        "embed_url": None,         # preview URL once deployed
        "logs": [f"Initialized workspace at {WORKSPACE_DIR}/{repo_name}."],
    }


def _extract_json(text: str) -> Dict[str, Any]:
    """Parse the model's reply as JSON, tolerating a Markdown code fence.

    Gemini sometimes wraps its JSON answer in ```/```json fences despite
    instructions; strip them before parsing. Raises ``ValueError`` /
    ``json.JSONDecodeError`` on malformed input.
    """
    raw = text.strip()
    if raw.startswith("```"):
        # Drop the opening fence line (``` or ```json) and the closing fence.
        raw = raw.split("\n", 1)[1] if "\n" in raw else raw.lstrip("`")
        raw = raw.rsplit("```", 1)[0]
    return json.loads(raw)


def handle_message(user_msg: str, state: Dict[str, Any]) -> Tuple[str, Dict[str, Any]]:
    """Send *user_msg* to Gemini, act on the JSON response, and return the reply.

    Side effects: creates the HF Space on first successful turn, writes the
    generated files under ``state["local_path"]``, and uploads a snapshot of
    that folder to the Space.

    Returns:
        ``(assistant_reply, updated_state)``.
    """
    chat = state["chat"]
    logs = state.setdefault("logs", [])
    logs.append(f"> **User**: {user_msg}")

    resp = chat.send_message(user_msg)
    logs.append("Received response from Gemini.")

    # ---------------------------------------------------------------------
    # Parse Gemini JSON answer
    # ---------------------------------------------------------------------
    try:
        data = _extract_json(resp.text)
        framework: str = data["framework"]
        files: Dict[str, str] = data.get("files", {})
        reply_msg: str = data.get("message", "")
    except Exception:
        logs.append("⚠️ Failed to parse assistant JSON.\n" + resp.text)
        return "⚠️ Parsing error. Check logs.", state

    # ---------------------------------------------------------------------
    # Create the target Space on first run
    # ---------------------------------------------------------------------
    if not state["created"]:
        full_repo = f"{state['hf_username']}/{state['repo_name']}"
        logs.append(f"Creating HF Space **{full_repo}** (template '{framework}') …")
        create_repo(
            repo_id=full_repo,
            token=state["hf_token"],
            exist_ok=True,
            repo_type="space",
            space_sdk=framework,
        )
        # NOTE(review): hf.co/spaces/<id> is the Space *page*; for a clean
        # iframe embed the direct "<user>-<repo>.hf.space" host is usually
        # preferred — confirm before changing.
        state.update({
            "created": True,
            "repo_id": full_repo,
            "embed_url": f"https://huggingface.co/spaces/{full_repo}",
        })

    # ---------------------------------------------------------------------
    # Write / overwrite files
    # ---------------------------------------------------------------------
    if files:
        logs.append(f"Writing {len(files)} file(s): {list(files)}")
        for relpath, content in files.items():
            dest = os.path.join(state["local_path"], relpath)
            # Guard: dirname is "" for bare filenames (e.g. "app.py"), and
            # os.makedirs("") raises FileNotFoundError.
            dest_dir = os.path.dirname(dest)
            if dest_dir:
                os.makedirs(dest_dir, exist_ok=True)
            # Fix: the original used a Unicode non-breaking hyphen in
            # "utf‑8", which raises LookupError: unknown encoding.
            with open(dest, "w", encoding="utf-8") as fp:
                fp.write(content)

    # ---------------------------------------------------------------------
    # Push snapshot
    # ---------------------------------------------------------------------
    logs.append("Uploading snapshot to Hugging Face …")
    HfApi(token=state["hf_token"]).upload_folder(
        folder_path=state["local_path"],
        repo_id=state["repo_id"],
        repo_type="space",
    )
    logs.append("Snapshot upload complete.")

    return reply_msg, state


# -----------------------------------------------------------------------------
# Gradio UI
# -----------------------------------------------------------------------------
with gr.Blocks(title="Gemini → HF Space scaffolder") as demo:
    # -------------------------------------------------------------------------
    # OAuth UI row (always visible)
    # -------------------------------------------------------------------------
    with gr.Row():
        login_btn = gr.LoginButton(
            logout_value="Logout ({username})",  # label once signed in
            variant="huggingface",               # optional styling
            size="lg",
        )
        status_md = gr.Markdown("Not logged in")

    # Callback to refresh the status text whenever the page loads OR OAuth changes.
    # Fix: gr.Markdown.update() was removed in Gradio 4 — use gr.update().
    def show_profile(profile: gr.OAuthProfile | None):
        if profile is None:
            return gr.update(value="*Not logged in.*")
        return gr.update(value=f"Logged in as **{profile.username}**")

    demo.load(show_profile, inputs=None, outputs=status_md)

    # -------------------------------------------------------------------------
    # Main app controls (hidden behind OAuth)
    # -------------------------------------------------------------------------
    with gr.Row():
        with gr.Column(scale=1):
            gemini_key = gr.Textbox(label="Gemini API Key", type="password")
            hf_user = gr.Textbox(label="HF Username")
            repo_name = gr.Textbox(label="New App (repo) name")
            session_id = gr.Textbox(value="", visible=False)
            start_btn = gr.Button("Start a new app")
        with gr.Column(scale=3):
            chatbot = gr.Chatbot(type="messages")
            logs_display = gr.Textbox(label="Operation Logs", interactive=False, lines=8)
            preview_iframe = gr.HTML(_EMPTY_PREVIEW_HTML)

    user_msg = gr.Textbox(label="Your message")
    send_btn = gr.Button("Send", interactive=False)

    # ---------------------------------------------------------------------
    # Callback: start a new app (requires OAuth token)
    # ---------------------------------------------------------------------
    def on_start(g_key: str, h_user: str, r_name: str, oauth_token: gr.OAuthToken | None):
        # Fix: gr.Error must be *raised* to surface a toast; returning it
        # would stuff the exception object into the session_id Textbox.
        if oauth_token is None:
            raise gr.Error("Please *Sign in with Hugging Face* first.")
        new_id = str(uuid.uuid4())
        state_store[new_id] = start_app(g_key, oauth_token.token, h_user, r_name)
        logs = "\n".join(state_store[new_id]["logs"])
        return new_id, logs, _AWAITING_PREVIEW_HTML, gr.update(interactive=True)

    # Gradio injects the OAuthToken from the gr.OAuthToken type annotation;
    # it is not listed in `inputs`.
    start_btn.click(
        on_start,
        inputs=[gemini_key, hf_user, repo_name],
        outputs=[session_id, logs_display, preview_iframe, send_btn],
    )

    # ---------------------------------------------------------------------
    # Callback: send a chat message
    # ---------------------------------------------------------------------
    def on_send(msg: str, chat_history: list, sess_id: str):
        # Fix: Chatbot(type="messages") expects role/content dicts, not
        # (user, assistant) tuples.
        if not sess_id or sess_id not in state_store:
            err = "Error: No active session. Click *Start a new app* first."
            return chat_history + [{"role": "assistant", "content": err}], sess_id, "", ""

        reply, new_state = handle_message(msg, state_store[sess_id])
        state_store[sess_id] = new_state
        chat_history = chat_history + [
            {"role": "user", "content": msg},
            {"role": "assistant", "content": reply},
        ]
        logs = "\n".join(new_state["logs"])
        # Reconstructed iframe markup (original was lost to text mangling).
        embed = (
            f'<iframe src="{new_state["embed_url"]}" width="100%" height="700" '
            f'frameborder="0"></iframe>'
            if new_state.get("embed_url")
            else ""
        )
        return chat_history, sess_id, logs, embed

    send_btn.click(
        on_send,
        inputs=[user_msg, chatbot, session_id],
        outputs=[chatbot, session_id, logs_display, preview_iframe],
    )
    user_msg.submit(
        on_send,
        inputs=[user_msg, chatbot, session_id],
        outputs=[chatbot, session_id, logs_display, preview_iframe],
    )

# -----------------------------------------------------------------------------
# Launch the Gradio app
# -----------------------------------------------------------------------------
if __name__ == "__main__":
    demo.launch()