# main.py
import os
import json
import uuid

import gradio as gr
from google import genai
from google.genai import types
from google.genai.types import Tool, GoogleSearch
from huggingface_hub import create_repo, HfApi

# ─── Configuration ───
MODEL_ID = "gemini-2.5-flash-preview-04-17"
WORKSPACE_DIR = "workspace"

SYSTEM_INSTRUCTION = (
    "You are a helpful coding assistant that scaffolds a complete Hugging Face Space app. "
    "Based on the user's request, decide between Gradio or Streamlit (whichever fits best), "
    "and respond with exactly one JSON object with keys:\n"
    " • \"framework\": either \"gradio\" or \"streamlit\"\n"
    " • \"files\": a map of relative file paths to file contents\n"
    " • \"message\": a human-readable summary\n"
    "Do not include extra text or markdown."
)
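
# For illustration, a well-formed reply under the contract above could look like the
# JSON below. The file names and contents are hypothetical, chosen only to show the
# expected shape, not something the model is guaranteed to emit verbatim:
#
# {
#   "framework": "gradio",
#   "files": {
#     "app.py": "import gradio as gr\n\ndef greet(name):\n    return f'Hello {name}!'\n...",
#     "requirements.txt": "gradio\n"
#   },
#   "message": "Scaffolded a minimal Gradio greeting app."
# }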

# In-memory session store: maps session IDs to state dicts
state_store = {}

def start_app(gemini_key, hf_token, hf_username, repo_name):
    os.makedirs(WORKSPACE_DIR, exist_ok=True)
    client = genai.Client(api_key=gemini_key)
    # Tools are configured via GenerateContentConfig rather than a separate
    # keyword on chats.create().
    config = types.GenerateContentConfig(
        system_instruction=SYSTEM_INSTRUCTION,
        tools=[Tool(google_search=GoogleSearch())],
    )
    chat = client.chats.create(model=MODEL_ID, config=config)
    local_path = os.path.join(WORKSPACE_DIR, repo_name)
    os.makedirs(local_path, exist_ok=True)
    state = {
        "chat": chat,
        "hf_token": hf_token,
        "hf_username": hf_username,
        "repo_name": repo_name,
        "created": False,
        "repo_id": None,
        "local_path": local_path,
        "logs": [f"Initialized workspace at {WORKSPACE_DIR}/{repo_name}."],
    }
    return state

def handle_message(user_msg, state):
    chat = state["chat"]
    logs = state.get("logs", [])
    logs.append(f"> User: {user_msg}")

    resp = chat.send_message(user_msg)
    logs.append("Received response from Gemini.")
    text = resp.text

    try:
        data = json.loads(text)
        framework = data["framework"]
        files = data.get("files", {})
        reply_msg = data.get("message", "")
    except Exception:
        logs.append("⚠️ Failed to parse assistant JSON.\n" + text)
        state["logs"] = logs
        return "⚠️ Parsing error. Check logs.", state

    if not state["created"]:
        full_repo = f"{state['hf_username']}/{state['repo_name']}"
        logs.append(f"Creating HF Space '{full_repo}' with template '{framework}'.")
        create_repo(
            repo_id=full_repo,
            token=state["hf_token"],
            exist_ok=True,
            repo_type="space",
            space_sdk=framework,
        )
        state["created"] = True
        state["repo_id"] = full_repo
        # The hub page at huggingface.co/spaces generally refuses to render inside
        # an iframe; the direct "<user>-<repo>.hf.space" host is the embeddable URL.
        # A simple lowercase mapping of the names is assumed here.
        state["embed_url"] = f"https://{state['hf_username']}-{state['repo_name']}.hf.space".lower()

    if files:
        logs.append(f"Writing {len(files)} file(s): {list(files.keys())}")
        for relpath, content in files.items():
            dest = os.path.join(state["local_path"], relpath)
            os.makedirs(os.path.dirname(dest), exist_ok=True)
            with open(dest, "w", encoding="utf-8") as f:
                f.write(content)

        logs.append("Uploading snapshot to Hugging Face...")
        api = HfApi(token=state["hf_token"])
        api.upload_folder(
            folder_path=state["local_path"],
            repo_id=state["repo_id"],
            repo_type="space",
        )
        logs.append("Snapshot upload complete.")

    state["logs"] = logs
    return reply_msg, state

# ─── Gradio UI ───
with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column(scale=1):
            gemini_key = gr.Textbox(label="Gemini API Key", type="password")
            hf_token = gr.Textbox(label="Hugging Face Token", type="password")
            hf_user = gr.Textbox(label="HF Username")
            repo_name = gr.Textbox(label="New App (repo) name")
            session_id = gr.Textbox(value="", visible=False)
            start_btn = gr.Button("Start a new app")
        with gr.Column(scale=3):
            chatbot = gr.Chatbot(type="messages")
            logs_display = gr.Textbox(label="Operation Logs", interactive=False, lines=8)
            preview_iframe = gr.HTML("<p>No deployed app yet.</p>")
            user_msg = gr.Textbox(label="Your message")
            send_btn = gr.Button("Send")

    def on_start(g_key, h_token, h_user, r_name):
        new_id = str(uuid.uuid4())
        state = start_app(g_key, h_token, h_user, r_name)
        state_store[new_id] = state
        logs = "\n".join(state["logs"])
        return new_id, logs, "<p>Awaiting first instruction...</p>"

    start_btn.click(
        on_start,
        inputs=[gemini_key, hf_token, hf_user, repo_name],
        outputs=[session_id, logs_display, preview_iframe],
    )

    def on_send(msg, chat_history, sess_id):
        if not sess_id or sess_id not in state_store:
            err = "Error: no active session found. Please start a new app first."
            # gr.Chatbot(type="messages") expects role/content dicts, not tuples.
            return chat_history + [{"role": "assistant", "content": err}], sess_id, "", ""

        state = state_store[sess_id]
        reply, new_state = handle_message(msg, state)
        state_store[sess_id] = new_state

        chat_history = chat_history + [
            {"role": "user", "content": msg},
            {"role": "assistant", "content": reply},
        ]
        logs = "\n".join(new_state.get("logs", []))

        embed = ""
        if new_state.get("embed_url"):
            embed = f'<iframe src="{new_state["embed_url"]}" width="100%" height="500px"></iframe>'
        return chat_history, sess_id, logs, embed

    send_btn.click(
        on_send,
        inputs=[user_msg, chatbot, session_id],
        outputs=[chatbot, session_id, logs_display, preview_iframe],
    )

    user_msg.submit(
        on_send,
        inputs=[user_msg, chatbot, session_id],
        outputs=[chatbot, session_id, logs_display, preview_iframe],
    )

demo.launch()
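
# A minimal requirements.txt for the Space running this builder would plausibly
# list the packages imported above (package names only; any version pinning is an
# assumption left to the reader, not taken from the source):
#   gradio
#   google-genai
#   huggingface_hub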