# NOTE: scrape residue removed here (Hugging Face Spaces page header, blob
# commit hashes, and the line-number gutter). The file content starts below.
import os
import json
import uuid
import gradio as gr
from google import genai
from google.genai import types
from google.genai.types import Tool, GoogleSearch
from huggingface_hub import create_repo, HfApi
# ——— Configuration ———
MODEL_ID = "gemini-2.5-flash-preview-04-17"
WORKSPACE_DIR = "workspace"
SYSTEM_INSTRUCTION = (
"You are a helpful coding assistant that scaffolds a complete Hugging Face Space app. "
"Based on the user's request, decide between Gradio or Streamlit (whichever fits best), "
"and respond with exactly one JSON object with keys:\n"
" • \"framework\": either \"gradio\" or \"streamlit\"\n"
" • \"files\": a map of relative file paths to file contents\n"
" • \"message\": a human-readable summary\n"
"Do not include extra text or markdown."
)
# In-memory session store: maps session IDs to state dicts
state_store: dict[str, dict] = {}
def start_app(gemini_key: str, hf_token: str, hf_username: str, repo_name: str) -> dict:
    """Initialise a new Gemini chat session and a local project workspace.

    Args:
        gemini_key: Gemini API key used to authenticate the genai client.
        hf_token: Hugging Face access token (write scope) for Space uploads.
        hf_username: Hugging Face account that will own the Space.
        repo_name: Space repo name; also used as the local workspace folder.

    Returns:
        A per-session state dict holding the chat handle, credentials, repo
        bookkeeping flags, the local workspace path, and an operation log.
    """
    os.makedirs(WORKSPACE_DIR, exist_ok=True)
    # Gemini chat client. Tools must be supplied via GenerateContentConfig:
    # chats.create() takes (model, config, history) and has no `tools` kwarg,
    # so the original call raised TypeError on every session start.
    client = genai.Client(api_key=gemini_key)
    config = types.GenerateContentConfig(
        system_instruction=SYSTEM_INSTRUCTION,
        tools=[Tool(google_search=GoogleSearch())],
    )
    chat = client.chats.create(model=MODEL_ID, config=config)
    # Per-project workspace (persisted so we can push snapshots later).
    local_path = os.path.join(WORKSPACE_DIR, repo_name)
    os.makedirs(local_path, exist_ok=True)
    return {
        "chat": chat,
        "hf_token": hf_token,
        "hf_username": hf_username,
        "repo_name": repo_name,
        "created": False,        # Space repo not created yet
        "repo_id": None,         # becomes "<user>/<repo>" on first scaffold
        "local_path": local_path,
        "embed_url": None,       # Space URL once created
        "logs": [f"Initialized workspace at {WORKSPACE_DIR}/{repo_name}."],
    }
def handle_message(user_msg: str, state: dict) -> tuple[str, dict]:
    """Send a user message to Gemini, scaffold/update the Space, return (reply, state).

    Args:
        user_msg: The user's instruction for the app being built.
        state: Session state dict produced by start_app(); mutated in place
            (logs, created/repo_id/embed_url flags) and also returned.

    Returns:
        Tuple of (assistant reply text, updated state dict). On a parse
        failure the reply is an error string and no repo/file work happens.
    """
    chat = state["chat"]
    logs = state.setdefault("logs", [])
    logs.append(f"> User: {user_msg}")
    resp = chat.send_message(user_msg)
    logs.append("Received response from Gemini.")
    # Models often wrap the JSON in ```json fences despite the system prompt;
    # strip fences (and a leading "json" tag) so a fenced-but-valid payload
    # still parses instead of failing the whole turn.
    raw = (resp.text or "").strip()
    if raw.startswith("```"):
        raw = raw.strip("`").strip()
        if raw.lower().startswith("json"):
            raw = raw[4:]
    try:
        data = json.loads(raw)
        framework: str = data["framework"]
        files: dict[str, str] = data.get("files", {})
        reply_msg: str = data.get("message", "")
    except Exception:
        # str() guards against resp.text being None, which previously raised
        # TypeError here and masked the original parse failure.
        logs.append("⚠️ Failed to parse assistant JSON." + "\n" + str(resp.text))
        return "⚠️ Parsing error. Check logs.", state
    # ---------------------------------------------------------------------
    # Create the Space on first run
    # ---------------------------------------------------------------------
    if not state["created"]:
        full_repo = f"{state['hf_username']}/{state['repo_name']}"
        logs.append(f"Creating HF Space '{full_repo}' (template '{framework}') …")
        create_repo(
            repo_id=full_repo,
            token=state["hf_token"],
            exist_ok=True,           # idempotent: safe if the Space already exists
            repo_type="space",
            space_sdk=framework,     # SDK chosen by the model (gradio/streamlit)
        )
        state.update({
            "created": True,
            "repo_id": full_repo,
            "embed_url": f"https://huggingface.co/spaces/{full_repo}",
        })
    # ---------------------------------------------------------------------
    # Write/overwrite files returned by Gemini
    # ---------------------------------------------------------------------
    if files:
        logs.append(f"Writing {len(files)} file(s): {list(files)}")
        for relpath, content in files.items():
            dest = os.path.join(state["local_path"], relpath)
            os.makedirs(os.path.dirname(dest), exist_ok=True)
            with open(dest, "w", encoding="utf-8") as f:
                f.write(content)
    # ---------------------------------------------------------------------
    # Commit the snapshot to the Space
    # ---------------------------------------------------------------------
    logs.append("Uploading snapshot to Hugging Face …")
    api = HfApi(token=state["hf_token"])
    api.upload_folder(
        folder_path=state["local_path"],
        repo_id=state["repo_id"],
        repo_type="space",
    )
    logs.append("Snapshot upload complete.")
    return reply_msg, state
# ——— Gradio UI ———
# ——— Gradio UI ———
with gr.Blocks(title="Gemini → HF Space scaffolder") as demo:
    with gr.Row():
        # -----------------------------------------------------------------
        # Left column – credentials & new-app form
        # -----------------------------------------------------------------
        with gr.Column(scale=1):
            gemini_key = gr.Textbox(label="Gemini API Key", type="password")
            hf_token = gr.Textbox(label="Hugging Face Token", type="password")
            hf_user = gr.Textbox(label="HF Username")
            repo_name = gr.Textbox(label="New App (repo) name")
            # Hidden per-browser session handle into state_store.
            session_id = gr.Textbox(value="", visible=False)
            start_btn = gr.Button("Start a new app")
        # -----------------------------------------------------------------
        # Right column – chat & live preview
        # -----------------------------------------------------------------
        with gr.Column(scale=3):
            chatbot = gr.Chatbot(type="messages")
            logs_display = gr.Textbox(label="Operation Logs", interactive=False, lines=8)
            preview_iframe = gr.HTML("<p>No deployed app yet.</p>")
            user_msg = gr.Textbox(label="Your message")
            send_btn = gr.Button("Send", interactive=False)  # disabled until a session exists

    # --------------- Callbacks ---------------
    def on_start(g_key, h_token, h_user, r_name):
        """Create a new session + workspace and enable the send button."""
        new_id = str(uuid.uuid4())
        state_store[new_id] = start_app(g_key, h_token, h_user, r_name)
        logs = "\n".join(state_store[new_id]["logs"])
        return (
            new_id,                                 # session_id (hidden)
            logs,                                   # logs_display
            "<p>Awaiting first instruction…</p>",   # preview_iframe
            gr.update(interactive=True),            # enable send button
        )

    start_btn.click(
        on_start,
        inputs=[gemini_key, hf_token, hf_user, repo_name],
        outputs=[session_id, logs_display, preview_iframe, send_btn],
    )

    def on_send(msg, chat_history, sess_id):
        """Relay one chat turn to handle_message() and refresh logs/preview.

        chatbot uses type="messages", so history entries must be
        {"role": ..., "content": ...} dicts — the previous tuple-style
        (msg, reply) entries are invalid in messages mode.
        """
        chat_history = chat_history or []
        if not sess_id or sess_id not in state_store:
            err = "Error: No session found. Please start a new app first."
            return (
                chat_history + [{"role": "assistant", "content": err}],
                sess_id,
                "",
                "",
            )
        reply, new_state = handle_message(msg, state_store[sess_id])
        state_store[sess_id] = new_state
        chat_history = chat_history + [
            {"role": "user", "content": msg},
            {"role": "assistant", "content": reply},
        ]
        logs = "\n".join(new_state["logs"])
        embed = (
            f'<iframe src="{new_state["embed_url"]}" width="100%" height="500px"></iframe>'
            if new_state.get("embed_url")
            else ""
        )
        return chat_history, sess_id, logs, embed

    send_btn.click(
        on_send,
        inputs=[user_msg, chatbot, session_id],
        outputs=[chatbot, session_id, logs_display, preview_iframe],
    )
    # Pressing Enter in the textbox behaves like clicking Send.
    user_msg.submit(
        on_send,
        inputs=[user_msg, chatbot, session_id],
        outputs=[chatbot, session_id, logs_display, preview_iframe],
    )
# -------------------------------------------------------------------------
# Run the Gradio app – expose on default 7860 unless HF Space overrides it
# -------------------------------------------------------------------------
if __name__ == "__main__":
    # Launch the Blocks UI defined above; guard keeps imports side-effect free.
    demo.launch()