import os
import json
import uuid
from typing import Any, Dict, Tuple
import gradio as gr
from huggingface_hub import create_repo, HfApi
from google import genai
from google.genai import types
from google.genai.types import Tool, GoogleSearch
# -----------------------------------------------------------------------------
# Configuration
# -----------------------------------------------------------------------------
MODEL_ID: str = "gemini-2.5-flash-preview-04-17"
WORKSPACE_DIR: str = "workspace"
SYSTEM_INSTRUCTION: str = (
"You are a helpful coding assistant that scaffolds a complete Hugging Face Space app. "
"Based on the user's request, decide between Gradio or Streamlit (whichever fits best), "
"and respond with exactly one JSON object with keys:\n"
" • \"framework\": either \"gradio\" or \"streamlit\"\n"
" • \"files\": a map of relative file paths to file contents\n"
" • \"message\": a human‑readable summary\n"
"Do not include extra text or markdown."
)
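# For reference, a reply that satisfies the instruction above might look like this
# (illustrative example only; the actual file names and contents come from the model):
# {
#   "framework": "gradio",
#   "files": {
#     "app.py": "import gradio as gr\n...",
#     "requirements.txt": "gradio\n"
#   },
#   "message": "Scaffolded a minimal Gradio app."
# }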
# In‑memory session store: maps session IDs -> state dicts
state_store: Dict[str, Dict[str, Any]] = {}
# -----------------------------------------------------------------------------
# Helper functions
# -----------------------------------------------------------------------------
def start_app(gemini_key: str, hf_token: str, hf_username: str, repo_name: str) -> Dict[str, Any]:
"""Initialise chat with Gemini + create a local workspace for the new app."""
os.makedirs(WORKSPACE_DIR, exist_ok=True)
# Gemini client & chat session
client = genai.Client(api_key=gemini_key)
    # Google Search grounding is configured through GenerateContentConfig;
    # chats.create does not take a separate `tools` argument.
    config = types.GenerateContentConfig(
        system_instruction=SYSTEM_INSTRUCTION,
        tools=[Tool(google_search=GoogleSearch())],
    )
    chat = client.chats.create(model=MODEL_ID, config=config)
# Local project folder (used as upload snapshot source)
local_path = os.path.join(WORKSPACE_DIR, repo_name)
os.makedirs(local_path, exist_ok=True)
return {
"chat": chat,
"hf_token": hf_token,
"hf_username": hf_username,
"repo_name": repo_name,
"created": False,
"repo_id": None,
"local_path": local_path,
"embed_url": None,
"logs": [f"Initialized workspace at {WORKSPACE_DIR}/{repo_name}."]
}
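# Optional robustness helper (illustrative sketch, not wired into handle_message):
# models occasionally wrap JSON replies in ``` fences despite the "no markdown"
# instruction, so resp.text could be passed through this before json.loads.
def strip_code_fences(text: str) -> str:
    """Remove a single surrounding ``` fence pair (e.g. ```json ... ```), if present."""
    stripped = text.strip()
    if stripped.startswith("```"):
        lines = stripped.splitlines()
        lines = lines[1:]                      # drop the opening ``` / ```json line
        if lines and lines[-1].strip() == "```":
            lines = lines[:-1]                 # drop the closing ``` line
        stripped = "\n".join(lines).strip()
    return stripped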
def handle_message(user_msg: str, state: Dict[str, Any]) -> Tuple[str, Dict[str, Any]]:
"""Send *user_msg* to Gemini, act on the JSON response, and return assistant reply."""
chat = state["chat"]
logs = state.setdefault("logs", [])
logs.append(f"> **User**: {user_msg}")
resp = chat.send_message(user_msg)
logs.append("Received response from Gemini.")
# ---------------------------------------------------------------------
# Parse Gemini JSON answer
# ---------------------------------------------------------------------
try:
data = json.loads(resp.text)
framework: str = data["framework"]
files: Dict[str, str] = data.get("files", {})
reply_msg: str = data.get("message", "")
except Exception:
logs.append("⚠️ Failed to parse assistant JSON.\n" + resp.text)
return "⚠️ Parsing error. Check logs.", state
# ---------------------------------------------------------------------
# Create the target Space on first run
# ---------------------------------------------------------------------
if not state["created"]:
full_repo = f"{state['hf_username']}/{state['repo_name']}"
logs.append(f"Creating HF Space **{full_repo}** (template '{framework}') …")
create_repo(
repo_id=full_repo,
token=state["hf_token"],
exist_ok=True,
repo_type="space",
space_sdk=framework,
)
state.update({
"created": True,
"repo_id": full_repo,
"embed_url": f"https://huggingface.co/spaces/{full_repo}",
})
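        # Note: if the full Space page refuses to render inside the preview iframe,
        # the direct-embed URL of the form https://{owner}-{repo}.hf.space
        # (lowercased; assumes owner and repo names contain only URL-safe characters)
        # could be stored in "embed_url" instead.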
# ---------------------------------------------------------------------
# Write / overwrite files
# ---------------------------------------------------------------------
if files:
logs.append(f"Writing {len(files)} file(s): {list(files)}")
for relpath, content in files.items():
dest = os.path.join(state["local_path"], relpath)
os.makedirs(os.path.dirname(dest), exist_ok=True)
            with open(dest, "w", encoding="utf-8") as fp:
fp.write(content)
# ---------------------------------------------------------------------
# Push snapshot
# ---------------------------------------------------------------------
logs.append("Uploading snapshot to Hugging Face …")
HfApi(token=state["hf_token"]).upload_folder(
folder_path=state["local_path"],
repo_id=state["repo_id"],
repo_type="space",
)
logs.append("Snapshot upload complete.")
return reply_msg, state
# -----------------------------------------------------------------------------
# Gradio UI
# -----------------------------------------------------------------------------
with gr.Blocks(title="Gemini → HF Space scaffolder") as demo:
# -------------------------------------------------------------------------
# OAuth UI row (always visible)
# -------------------------------------------------------------------------
with gr.Row():
        login_btn = gr.LoginButton(
            logout_value="Logout ({})",  # the {} placeholder is replaced by the username once signed in
            variant="huggingface",       # optional styling
            size="lg",
        )
status_md = gr.Markdown("Not logged in")
# Callback to refresh the status text whenever the page loads OR OAuth changes
    def show_profile(profile: gr.OAuthProfile | None) -> str:
        # gr.OAuthProfile is injected automatically based on the type hint;
        # it is None when the visitor is not signed in.
        if profile is None:
            return "*Not logged in.*"
        return f"Logged in as **{profile.username}**"
demo.load(show_profile, inputs=None, outputs=status_md)
# -------------------------------------------------------------------------
# Main app controls (hidden behind OAuth)
# -------------------------------------------------------------------------
with gr.Row():
with gr.Column(scale=1):
gemini_key = gr.Textbox(label="Gemini API Key", type="password")
hf_user = gr.Textbox(label="HF Username")
repo_name = gr.Textbox(label="New App (repo) name")
session_id = gr.Textbox(value="", visible=False)
start_btn = gr.Button("Start a new app")
with gr.Column(scale=3):
chatbot = gr.Chatbot(type="messages")
logs_display = gr.Textbox(label="Operation Logs", interactive=False, lines=8)
preview_iframe = gr.HTML("<p>No deployed app yet.</p>")
user_msg = gr.Textbox(label="Your message")
send_btn = gr.Button("Send", interactive=False)
# ---------------------------------------------------------------------
# Callback: start a new app (requires OAuth token)
# ---------------------------------------------------------------------
    def on_start(g_key: str, h_user: str, r_name: str, oauth_token: gr.OAuthToken | None):
        # gr.OAuthToken is injected based on the type hint; it is None when the
        # visitor has not signed in with Hugging Face.
        if oauth_token is None:
            # Raising gr.Error surfaces the message in the UI; returning it would not.
            raise gr.Error("Please *Sign in with Hugging Face* first.")
        new_id = str(uuid.uuid4())
        state_store[new_id] = start_app(g_key, oauth_token.token, h_user, r_name)
        logs = "\n".join(state_store[new_id]["logs"])
        return new_id, logs, "<p>Awaiting first instruction…</p>", gr.update(interactive=True)
start_btn.click(
on_start,
inputs=[gemini_key, hf_user, repo_name],
outputs=[session_id, logs_display, preview_iframe, send_btn],
)
# ---------------------------------------------------------------------
# Callback: send a chat message
# ---------------------------------------------------------------------
    def on_send(msg: str, chat_history: list[dict], sess_id: str):
        # The Chatbot is in "messages" mode, so history entries are dicts with
        # "role" and "content" keys rather than (user, bot) tuples.
        if not sess_id or sess_id not in state_store:
            err = "Error: No active session. Click *Start a new app* first."
            return chat_history + [{"role": "assistant", "content": err}], sess_id, "", ""
        reply, new_state = handle_message(msg, state_store[sess_id])
        state_store[sess_id] = new_state
        chat_history = chat_history + [
            {"role": "user", "content": msg},
            {"role": "assistant", "content": reply},
        ]
        logs = "\n".join(new_state["logs"])
        embed = (
            f'<iframe src="{new_state["embed_url"]}" width="100%" height="500px"></iframe>'
            if new_state.get("embed_url") else ""
        )
        return chat_history, sess_id, logs, embed
send_btn.click(
on_send,
inputs=[user_msg, chatbot, session_id],
outputs=[chatbot, session_id, logs_display, preview_iframe],
)
user_msg.submit(
on_send,
inputs=[user_msg, chatbot, session_id],
outputs=[chatbot, session_id, logs_display, preview_iframe],
)
# -----------------------------------------------------------------------------
# Launch the Gradio app
# -----------------------------------------------------------------------------
if __name__ == "__main__":
demo.launch()
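# Likely dependencies for this Space (assumed, not pinned by this file):
#   gradio (with OAuth support), huggingface_hub, google-genai
# Sign-in via gr.LoginButton also assumes OAuth is enabled in the Space's
# README metadata (hf_oauth: true).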