# testtsettset / app.py — Hugging Face Space source file
# (author: wuhp, commit 8e77934, 7.67 kB; file-viewer chrome converted
# to this comment header so the module parses as Python).
import os
import json
import uuid
import gradio as gr
from google import genai
from google.genai import types
from google.genai.types import Tool, GoogleSearch
from huggingface_hub import create_repo, HfApi
# ——— Configuration ———
MODEL_ID = "gemini-2.5-flash-preview-04-17"
WORKSPACE_DIR = "workspace"
SYSTEM_INSTRUCTION = (
"You are a helpful coding assistant that scaffolds a complete Hugging Face Space app. "
"Based on the user's request, decide between Gradio or Streamlit (whichever fits best), "
"and respond with exactly one JSON object with keys:\n"
" • \"framework\": either \"gradio\" or \"streamlit\"\n"
" • \"files\": a map of relative file paths to file contents\n"
" • \"message\": a human-readable summary\n"
"Do not include extra text or markdown."
)
# In-memory session store: maps session IDs to state dicts
state_store: dict[str, dict] = {}
def start_app(gemini_key: str, hf_token: str, hf_username: str, repo_name: str) -> dict:
    """Initialise a new Gemini chat session plus a local project workspace.

    Args:
        gemini_key: API key for the Gemini client.
        hf_token: Hugging Face access token (repo creation and uploads).
        hf_username: Hugging Face account that will own the new Space.
        repo_name: Name of the Space repo; also the workspace folder name.

    Returns:
        A session-state dict that handle_message() reads and updates.
    """
    os.makedirs(WORKSPACE_DIR, exist_ok=True)

    # Gemini chat client.
    # FIX: tools must be supplied via GenerateContentConfig — the google-genai
    # SDK's chats.create() signature is (model, config, history) and has no
    # `tools` keyword, so the original call raised TypeError at session start.
    client = genai.Client(api_key=gemini_key)
    config = types.GenerateContentConfig(
        system_instruction=SYSTEM_INSTRUCTION,
        tools=[Tool(google_search=GoogleSearch())],
    )
    chat = client.chats.create(model=MODEL_ID, config=config)

    # Per-project workspace (persisted on disk so snapshots can be pushed).
    local_path = os.path.join(WORKSPACE_DIR, repo_name)
    os.makedirs(local_path, exist_ok=True)

    return {
        "chat": chat,
        "hf_token": hf_token,
        "hf_username": hf_username,
        "repo_name": repo_name,
        "created": False,      # Space repo not yet created on the Hub
        "repo_id": None,
        "local_path": local_path,
        "embed_url": None,     # filled in after the Space exists
        "logs": [f"Initialized workspace at {WORKSPACE_DIR}/{repo_name}."],
    }
def handle_message(user_msg: str, state: dict):
"""Send user message to Gemini, scaffold / update the Space and return reply text."""
chat = state["chat"]
logs = state.setdefault("logs", [])
logs.append(f"> User: {user_msg}")
resp = chat.send_message(user_msg)
logs.append("Received response from Gemini.")
try:
data = json.loads(resp.text)
framework: str = data["framework"]
files: dict[str, str] = data.get("files", {})
reply_msg: str = data.get("message", "")
except Exception:
logs.append("⚠️ Failed to parse assistant JSON." + "\n" + resp.text)
return "⚠️ Parsing error. Check logs.", state
# ---------------------------------------------------------------------
# Create the Space on first run
# ---------------------------------------------------------------------
if not state["created"]:
full_repo = f"{state['hf_username']}/{state['repo_name']}"
logs.append(f"Creating HF Space '{full_repo}' (template '{framework}') …")
create_repo(
repo_id=full_repo,
token=state["hf_token"],
exist_ok=True,
repo_type="space",
space_sdk=framework,
)
state.update({
"created": True,
"repo_id": full_repo,
"embed_url": f"https://huggingface.co/spaces/{full_repo}",
})
# ---------------------------------------------------------------------
# Write/overwrite files returned by Gemini
# ---------------------------------------------------------------------
if files:
logs.append(f"Writing {len(files)} file(s): {list(files)}")
for relpath, content in files.items():
dest = os.path.join(state["local_path"], relpath)
os.makedirs(os.path.dirname(dest), exist_ok=True)
with open(dest, "w", encoding="utf-8") as f:
f.write(content)
# ---------------------------------------------------------------------
# Commit the snapshot to the Space
# ---------------------------------------------------------------------
logs.append("Uploading snapshot to Hugging Face …")
api = HfApi(token=state["hf_token"])
api.upload_folder(
folder_path=state["local_path"],
repo_id=state["repo_id"],
repo_type="space",
)
logs.append("Snapshot upload complete.")
return reply_msg, state
# ——— Gradio UI ———
# ——— Gradio UI ———
with gr.Blocks(title="Gemini → HF Space scaffolder") as demo:
    with gr.Row():
        # -----------------------------------------------------------------
        # Left column – credentials & new-app form
        # -----------------------------------------------------------------
        with gr.Column(scale=1):
            gemini_key = gr.Textbox(label="Gemini API Key", type="password")
            hf_token = gr.Textbox(label="Hugging Face Token", type="password")
            hf_user = gr.Textbox(label="HF Username")
            repo_name = gr.Textbox(label="New App (repo) name")
            session_id = gr.Textbox(value="", visible=False)  # hidden session handle
            start_btn = gr.Button("Start a new app")

        # -----------------------------------------------------------------
        # Right column – chat & live preview
        # -----------------------------------------------------------------
        with gr.Column(scale=3):
            chatbot = gr.Chatbot(type="messages")
            logs_display = gr.Textbox(label="Operation Logs", interactive=False, lines=8)
            preview_iframe = gr.HTML("<p>No deployed app yet.</p>")
            user_msg = gr.Textbox(label="Your message")
            send_btn = gr.Button("Send", interactive=False)  # disabled until a session exists

    # --------------- Callbacks ---------------
    def on_start(g_key, h_token, h_user, r_name):
        """Create a fresh session + workspace and enable the send button."""
        new_id = str(uuid.uuid4())
        state_store[new_id] = start_app(g_key, h_token, h_user, r_name)
        logs = "\n".join(state_store[new_id]["logs"])
        return (
            new_id,                                 # session_id (hidden)
            logs,                                   # logs_display
            "<p>Awaiting first instruction…</p>",   # preview_iframe
            gr.update(interactive=True),            # enable send button
        )

    start_btn.click(
        on_start,
        inputs=[gemini_key, hf_token, hf_user, repo_name],
        outputs=[session_id, logs_display, preview_iframe, send_btn],
    )

    def on_send(msg, chat_history, sess_id):
        """Relay one chat turn to Gemini and refresh logs + preview.

        FIX: gr.Chatbot(type="messages") requires history entries shaped as
        {"role": ..., "content": ...} dicts; the original appended
        (user, assistant) tuples, which the messages format rejects.
        """
        if not sess_id or sess_id not in state_store:
            err = "Error: No session found. Please start a new app first."
            return chat_history + [{"role": "assistant", "content": err}], sess_id, "", ""
        reply, new_state = handle_message(msg, state_store[sess_id])
        state_store[sess_id] = new_state
        chat_history.append({"role": "user", "content": msg})
        chat_history.append({"role": "assistant", "content": reply})
        logs = "\n".join(new_state["logs"])
        embed = (
            f'<iframe src="{new_state["embed_url"]}" width="100%" height="500px"></iframe>'
            if new_state.get("embed_url")
            else ""
        )
        return chat_history, sess_id, logs, embed

    send_btn.click(
        on_send,
        inputs=[user_msg, chatbot, session_id],
        outputs=[chatbot, session_id, logs_display, preview_iframe],
    )
    user_msg.submit(
        on_send,
        inputs=[user_msg, chatbot, session_id],
        outputs=[chatbot, session_id, logs_display, preview_iframe],
    )
# -------------------------------------------------------------------------
# Script entry point: start the Gradio server when run directly.
# (On Hugging Face Spaces the host/port are supplied by the platform.)
# -------------------------------------------------------------------------
if __name__ == "__main__":
    demo.launch()