# NOTE(review): removed non-Python page-scrape artifacts that preceded the
# code (Space status banner, file-size line, git blob hashes, line gutter).
# main.py
import json
import os

import gradio as gr
from google import genai
from google.genai import types
from google.genai.types import GoogleSearch, Tool
# NOTE(review): huggingface_hub has no `snapshot_upload`; the correct API for
# pushing a local folder to a repo is `upload_folder`.
from huggingface_hub import create_repo, upload_folder
# ─── Configuration ───

# Gemini model used for every chat turn.
MODEL_ID = "gemini-2.5-flash-preview-04-17"
# Local directory under which each generated app's files are staged.
WORKSPACE_DIR = "workspace"
# System prompt forcing the model to reply with exactly one JSON object so
# handle_message() can json.loads() it directly.  (Bullets were mojibake
# "β’" in the original; restored to "•".)
SYSTEM_INSTRUCTION = (
    "You are a helpful coding assistant that scaffolds a complete Hugging Face Space app. "
    "Based on the user's request, decide between Gradio or Streamlit (whichever fits best), "
    "and respond with exactly one JSON object with keys:\n"
    " • \"framework\": either \"gradio\" or \"streamlit\"\n"
    " • \"files\": a map of relative file paths to file contents\n"
    " • \"message\": a human-readable summary\n"
    "Do not include extra text or markdown."
)
def start_app(gemini_key, hf_token, hf_username, repo_name):
    """
    Initialize the local workspace and a fresh Gemini chat session.

    Parameters:
        gemini_key: Google AI Studio API key used to build the genai client.
        hf_token: Hugging Face write token (stored in state for later pushes).
        hf_username: HF account that will own the new Space.
        repo_name: name of the Space repo; also the workspace subfolder name.

    Returns:
        A mutable session-state dict consumed by handle_message(), with keys:
        chat, hf_token, hf_username, repo_name, created, repo_id,
        local_path, logs.
    """
    os.makedirs(WORKSPACE_DIR, exist_ok=True)

    client = genai.Client(api_key=gemini_key)
    # Tools must be part of GenerateContentConfig: chats.create() accepts
    # only model/config/history, so the original separate `tools=` kwarg
    # would raise a TypeError.
    config = types.GenerateContentConfig(
        system_instruction=SYSTEM_INSTRUCTION,
        tools=[Tool(google_search=GoogleSearch())],
    )
    chat = client.chats.create(model=MODEL_ID, config=config)

    # Per-app staging folder: workspace/<repo_name>.
    local_path = os.path.join(WORKSPACE_DIR, repo_name)
    os.makedirs(local_path, exist_ok=True)

    state = {
        "chat": chat,
        "hf_token": hf_token,
        "hf_username": hf_username,
        "repo_name": repo_name,
        "created": False,   # flips True once the HF Space repo exists
        "repo_id": None,
        "local_path": local_path,
        "logs": [f"Initialized workspace at {WORKSPACE_DIR}/{repo_name}."],
    }
    return state
def handle_message(user_msg, state):
    """
    Send one user message to Gemini, apply returned file updates, push the
    workspace to the Hugging Face Space, and log every step.

    Parameters:
        user_msg: the user's chat message.
        state: session dict from start_app(); reads/writes keys chat, logs,
            created, hf_username, repo_name, hf_token, repo_id, local_path,
            embed_url.

    Returns:
        (reply_text, state) — the assistant's human-readable message, or an
        error notice if the model's reply was not the expected JSON.
    """
    chat = state["chat"]
    logs = state.get("logs", [])
    logs.append(f"> User: {user_msg}")

    # Ask Gemini for the next structured step (one JSON object, per
    # SYSTEM_INSTRUCTION).
    resp = chat.send_message(user_msg)
    logs.append("Received response from Gemini.")
    text = resp.text

    try:
        data = json.loads(text)
        framework = data["framework"]
        files = data.get("files", {})
        reply_msg = data.get("message", "")
    except (json.JSONDecodeError, KeyError, TypeError):
        # Model strayed from the JSON contract (or resp.text was None —
        # str() keeps the log append from raising as the original did).
        logs.append("⚠️ Failed to parse assistant JSON.\n" + str(text))
        state["logs"] = logs
        return "⚠️ Parsing error. Check logs.", state

    # On the first structured response, create the target Space on the Hub.
    if not state["created"]:
        full_repo = f"{state['hf_username']}/{state['repo_name']}"
        logs.append(f"Creating HF Space '{full_repo}' with template '{framework}'.")
        create_repo(
            repo_id=full_repo,
            token=state["hf_token"],
            exist_ok=True,
            repo_type="space",
            space_sdk=framework,
        )
        state["created"] = True
        state["repo_id"] = full_repo
        state["embed_url"] = f"https://huggingface.co/spaces/{full_repo}"

    # Write the returned files into the local workspace.
    if files:
        logs.append(f"Writing {len(files)} file(s): {list(files.keys())}")
        for relpath, content in files.items():
            dest = os.path.join(state["local_path"], relpath)
            os.makedirs(os.path.dirname(dest), exist_ok=True)
            with open(dest, "w", encoding="utf-8") as f:
                f.write(content)

    # Push the whole workspace folder to the Space.  huggingface_hub has no
    # `snapshot_upload(folder=...)`; `upload_folder(folder_path=...)` is the
    # correct API.
    logs.append("Uploading snapshot to Hugging Face...")
    upload_folder(
        repo_id=state["repo_id"],
        repo_type="space",
        token=state["hf_token"],
        folder_path=state["local_path"],
        commit_message="Update from assistant",
    )
    logs.append("Snapshot upload complete.")

    state["logs"] = logs
    return reply_msg, state
# ─── Gradio UI ───
# NOTE: component creation order inside the Blocks context determines the
# on-screen layout, so statements here must not be reordered.
with gr.Blocks() as demo:
    # Credentials and repo settings live in the sidebar.
    with gr.Sidebar():
        gemini_key = gr.Textbox(label="Gemini API Key", type="password")
        hf_token = gr.Textbox(label="Hugging Face Token", type="password")
        hf_user = gr.Textbox(label="HF Username")
        repo_name = gr.Textbox(label="New App (repo) name")
        start_btn = gr.Button("Start a new app")
    chatbot = gr.Chatbot()
    state = gr.State(value=None)  # per-session dict built by start_app()
    logs_display = gr.Textbox(label="Operation Logs", interactive=False, lines=8)
    preview_iframe = gr.HTML("<p>No deployed app yet.</p>")
    user_msg = gr.Textbox(label="Your message")
    send_btn = gr.Button("Send")
    def on_start(g_key, h_token, h_user, r_name):
        """Initialize session state and reset the log and preview panes."""
        s = start_app(g_key, h_token, h_user, r_name)
        logs = "\n".join(s["logs"])
        return s, logs, "<p>Awaiting first instruction...</p>"
    start_btn.click(
        on_start,
        inputs=[gemini_key, hf_token, hf_user, repo_name],
        outputs=[state, logs_display, preview_iframe]
    )
    def on_send(msg, chat_history, s):
        """Relay one chat turn; refresh history, state, logs, and preview."""
        # Ignore messages sent before "Start a new app" has been clicked.
        if s is None:
            return chat_history, s, "", ""
        reply, new_state = handle_message(msg, s)
        chat_history = chat_history + [(msg, reply)]
        logs = "\n".join(new_state.get("logs", []))
        embed = ""
        # Once the Space exists, embed it so the user can preview the app.
        if new_state.get("embed_url"):
            embed = f'<iframe src="{new_state["embed_url"]}" width="100%" height="500px"></iframe>'
        return chat_history, new_state, logs, embed
    send_btn.click(
        on_send,
        inputs=[user_msg, chatbot, state],
        outputs=[chatbot, state, logs_display, preview_iframe]
    )
    # Pressing Enter in the message box behaves like clicking Send.
    user_msg.submit(
        on_send,
        inputs=[user_msg, chatbot, state],
        outputs=[chatbot, state, logs_display, preview_iframe]
    )
demo.launch()