# main.py
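# Gradio front end that asks Gemini to scaffold a Gradio or Streamlit app,
# writes the generated files into a local workspace, and deploys them to a
# Hugging Face Space via the Hub API.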
import os
import json
import uuid
import gradio as gr
from google import genai
from google.genai import types
from google.genai.types import Tool, GoogleSearch
from huggingface_hub import create_repo, HfApi
# ─── Configuration ───
MODEL_ID = "gemini-2.5-flash-preview-04-17"
WORKSPACE_DIR = "workspace"
SYSTEM_INSTRUCTION = (
"You are a helpful coding assistant that scaffolds a complete Hugging Face Space app. "
"Based on the user's request, decide between Gradio or Streamlit (whichever fits best), "
"and respond with exactly one JSON object with keys:\n"
" β’ \"framework\": either \"gradio\" or \"streamlit\"\n"
" β’ \"files\": a map of relative file paths to file contents\n"
" β’ \"message\": a human-readable summary\n"
"Do not include extra text or markdown."
)
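# Illustrative shape of the reply the system instruction asks for (example
# values only, not output of this code):
# {
#   "framework": "gradio",
#   "files": {"app.py": "...", "requirements.txt": "..."},
#   "message": "Scaffolded a minimal Gradio app."
# }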
# In-memory session store: maps session IDs to state dicts
state_store = {}
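# Create a Gemini chat session and a local workspace folder for the new app,
# returning the per-session state dict that is kept in state_store.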
def start_app(gemini_key, hf_token, hf_username, repo_name):
    os.makedirs(WORKSPACE_DIR, exist_ok=True)
    client = genai.Client(api_key=gemini_key)
    # Tools are passed through GenerateContentConfig; chats.create() does not
    # accept a separate tools argument.
    config = types.GenerateContentConfig(
        system_instruction=SYSTEM_INSTRUCTION,
        tools=[Tool(google_search=GoogleSearch())],
    )
    chat = client.chats.create(model=MODEL_ID, config=config)
    local_path = os.path.join(WORKSPACE_DIR, repo_name)
    os.makedirs(local_path, exist_ok=True)
    state = {
        "chat": chat,
        "hf_token": hf_token,
        "hf_username": hf_username,
        "repo_name": repo_name,
        "created": False,
        "repo_id": None,
        "local_path": local_path,
        "logs": [f"Initialized workspace at {WORKSPACE_DIR}/{repo_name}."],
    }
    return state
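# Forward a user message to Gemini, parse its JSON reply, create the Space on
# first use, write the returned files locally, and upload a snapshot.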
def handle_message(user_msg, state):
    chat = state["chat"]
    logs = state.get("logs", [])
    logs.append(f"> User: {user_msg}")
    resp = chat.send_message(user_msg)
    logs.append("Received response from Gemini.")
    text = resp.text
    try:
        data = json.loads(text)
        framework = data["framework"]
        files = data.get("files", {})
        reply_msg = data.get("message", "")
    except Exception:
        logs.append("⚠️ Failed to parse assistant JSON.\n" + text)
        state["logs"] = logs
        return "⚠️ Parsing error. Check logs.", state
    # Create the Space once, using the framework the assistant picked.
    if not state["created"]:
        full_repo = f"{state['hf_username']}/{state['repo_name']}"
        logs.append(f"Creating HF Space '{full_repo}' with template '{framework}'.")
        create_repo(
            repo_id=full_repo,
            token=state["hf_token"],
            exist_ok=True,
            repo_type="space",
            space_sdk=framework,
        )
        state["created"] = True
        state["repo_id"] = full_repo
        # Page URL of the new Space, shown in the preview pane.
        state["embed_url"] = f"https://huggingface.co/spaces/{full_repo}"
    if files:
        logs.append(f"Writing {len(files)} file(s): {list(files.keys())}")
        for relpath, content in files.items():
            dest = os.path.join(state["local_path"], relpath)
            os.makedirs(os.path.dirname(dest), exist_ok=True)
            with open(dest, "w", encoding="utf-8") as f:
                f.write(content)
        logs.append("Uploading snapshot to Hugging Face...")
        api = HfApi(token=state["hf_token"])
        api.upload_folder(
            folder_path=state["local_path"],
            repo_id=state["repo_id"],
            repo_type="space",
        )
        logs.append("Snapshot upload complete.")
    state["logs"] = logs
    return reply_msg, state
# ─── Gradio UI ───
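# Left column: credentials and app setup. Right column: chat with the
# assistant, operation logs, and a preview of the deployed Space.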
with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column(scale=1):
            gemini_key = gr.Textbox(label="Gemini API Key", type="password")
            hf_token = gr.Textbox(label="Hugging Face Token", type="password")
            hf_user = gr.Textbox(label="HF Username")
            repo_name = gr.Textbox(label="New App (repo) name")
            session_id = gr.Textbox(value="", visible=False)
            start_btn = gr.Button("Start a new app")
        with gr.Column(scale=3):
            chatbot = gr.Chatbot(type="messages")
            logs_display = gr.Textbox(label="Operation Logs", interactive=False, lines=8)
            preview_iframe = gr.HTML("<p>No deployed app yet.</p>")
            user_msg = gr.Textbox(label="Your message")
            send_btn = gr.Button("Send")
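    # Start a fresh session: build per-user state and key it by a random UUID
    # so concurrent browser sessions do not share a chat.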
    def on_start(g_key, h_token, h_user, r_name):
        new_id = str(uuid.uuid4())
        state = start_app(g_key, h_token, h_user, r_name)
        state_store[new_id] = state
        logs = "\n".join(state["logs"])
        return new_id, logs, "<p>Awaiting first instruction...</p>"

    start_btn.click(
        on_start,
        inputs=[gemini_key, hf_token, hf_user, repo_name],
        outputs=[session_id, logs_display, preview_iframe],
    )
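    # One chat turn: send the message to Gemini via handle_message, then
    # refresh the chat history, logs, and preview iframe.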
    def on_send(msg, chat_history, sess_id):
        if not sess_id or sess_id not in state_store:
            err = "Error: no active session found. Please start a new app first."
            # gr.Chatbot(type="messages") expects role/content dicts, not tuples.
            return chat_history + [{"role": "assistant", "content": err}], sess_id, "", ""
        state = state_store[sess_id]
        reply, new_state = handle_message(msg, state)
        state_store[sess_id] = new_state
        chat_history = chat_history + [
            {"role": "user", "content": msg},
            {"role": "assistant", "content": reply},
        ]
        logs = "\n".join(new_state.get("logs", []))
        embed = ""
        if new_state.get("embed_url"):
            embed = f'<iframe src="{new_state["embed_url"]}" width="100%" height="500px"></iframe>'
        return chat_history, sess_id, logs, embed
    send_btn.click(
        on_send,
        inputs=[user_msg, chatbot, session_id],
        outputs=[chatbot, session_id, logs_display, preview_iframe],
    )
    user_msg.submit(
        on_send,
        inputs=[user_msg, chatbot, session_id],
        outputs=[chatbot, session_id, logs_display, preview_iframe],
    )
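# Start the builder UI locally (e.g. `python main.py`). Pass share=True to
# demo.launch() below if a temporary public link is needed.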
demo.launch()