import os
import json
import gradio as gr
from huggingface_hub import create_repo, list_models, upload_file, constants
from huggingface_hub.utils import build_hf_headers, get_session, hf_raise_for_status
from google import genai
from google.genai import types
# --- Globals ---
# Module-level handles populated by init_chat(); both stay None until a
# sandbox has been initialized. `client` holds the genai.Client and `chat`
# the active chat session used by chatbot_respond().
client = None
chat = None
# --- System instruction for Gemini ---
# Sent once as the chat's system prompt: treat the Space as a sandbox and
# answer using the {success, data, message} JSON contract.
system_instruction = (
    "You are a helpful assistant that writes, debugs, and pushes code to Hugging Face Spaces. "
    "Treat Spaces as a sandbox: create, upload, debug. "
    "Use function calling for logs and respond in JSON {success, data, message}."
)
# --- Function declarations for logs ---
# JSON-schema tool declarations exposed to Gemini so the model can request
# a Space's build or container logs via function calling. Both tools take a
# single required string argument: the Space's repo_id ("user/space").
get_build_logs_decl = {
    "name": "get_build_logs",
    "description": "Fetches build logs for a Space",
    "parameters": {
        "type": "object",
        "properties": {"repo_id": {"type": "string"}},
        "required": ["repo_id"],
    },
}
get_container_logs_decl = {
    "name": "get_container_logs",
    "description": "Fetches container logs for a Space",
    "parameters": {
        "type": "object",
        "properties": {"repo_id": {"type": "string"}},
        "required": ["repo_id"],
    },
}
# Wrap both log declarations in a single Tool; passed to the Gemini chat
# config in init_chat() so the model can call them during a conversation.
tools = [ types.Tool(function_declarations=[get_build_logs_decl, get_container_logs_decl]) ]
# --- HF helpers ---
def create_space_backend(username, hf_token, repo_name, sdk):
    """Create (or reuse) a Hugging Face Space and return its full repo id.

    `sdk` selects the Space runtime (e.g. "gradio" or "streamlit").
    """
    repo_id = "/".join((username, repo_name))
    # exist_ok=True makes this idempotent: re-initializing reuses the Space.
    create_repo(
        repo_id=repo_id,
        token=hf_token,
        exist_ok=True,
        repo_type="space",
        space_sdk=sdk,
    )
    return repo_id
def fetch_logs(repo_id, level):
    """Stream a Space's logs and return them as a list of event dicts.

    Args:
        repo_id: Full Space id, e.g. "user/space".
        level: Log stream name — "build" or "run" (see chatbot_respond).

    Returns:
        List of {"timestamp": ..., "message": ...} dicts, one per SSE
        "data:" event received from the log endpoint.
    """
    # A short-lived JWT is required to read the Space's log stream.
    jwt_url = f"{constants.ENDPOINT}/api/spaces/{repo_id}/jwt"
    r = get_session().get(jwt_url, headers=build_hf_headers())
    hf_raise_for_status(r)
    jwt = r.json()["token"]
    url = f"https://api.hf.space/v1/{repo_id}/logs/{level}"
    lines = []
    with get_session().get(url, headers=build_hf_headers(token=jwt), stream=True) as resp:
        hf_raise_for_status(resp)
        for raw in resp.iter_lines():
            # Server-sent events: payload lines are prefixed with b"data: ".
            if not raw.startswith(b"data: "):
                continue
            try:
                ev = json.loads(raw[len(b"data: "):].decode())
            except (json.JSONDecodeError, UnicodeDecodeError):
                # Skip malformed/partial events instead of aborting the
                # stream. (Replaces a bare `except: pass`, which silently
                # swallowed every error, including KeyboardInterrupt.)
                continue
            lines.append({"timestamp": ev.get("timestamp"), "message": ev.get("data")})
    return lines
# --- Chat init & respond ---
def init_chat(repo_name, sdk, gemini_key, hf_profile, hf_token):
    """Create the sandbox Space and start a Gemini chat session.

    Args:
        repo_name: Name of the Space to create under the user's namespace.
        sdk: Space SDK ("gradio" or "streamlit").
        gemini_key: Gemini API key string.
        hf_profile: OAuth profile (exposes .username) or None if not signed in.
        hf_token: OAuth token object (exposes .token) or None.

    Returns:
        (status, repo_id): `status` follows the {success, data, message}
        contract shown in the UI; `repo_id` is "" on failure so downstream
        handlers can detect an uninitialized sandbox.
    """
    global client, chat
    # --- validation guards -----------------------------------------------
    if hf_profile is None or hf_token is None:
        return {"success": False, "data": None, "message": "Please sign in with HF."}, ""
    if not gemini_key:
        return {"success": False, "data": None, "message": "Missing Gemini API key."}, ""
    try:
        # Create (or reuse) the Space that acts as the sandbox.
        repo_id = create_space_backend(hf_profile.username, hf_token.token, repo_name, sdk)
        # Expose the token so hub helpers (e.g. fetch_logs) can authenticate.
        os.environ["HF_TOKEN"] = hf_token.token
        # Start a fresh Gemini chat with the log-fetching tools enabled.
        client = genai.Client(api_key=gemini_key)
        chat = client.chats.create(
            model="gemini-2.5-flash-preview-04-17",
            config=types.GenerateContentConfig(
                system_instruction=system_instruction, tools=tools, temperature=0
            ),
        )
    except Exception as exc:
        # Surface setup failures (repo creation, Gemini auth, ...) in the UI
        # via the JSON contract instead of crashing the Gradio handler.
        return {"success": False, "data": None, "message": f"Initialization failed: {exc}"}, ""
    return {"success": True, "data": None, "message": f"Sandbox ready: {repo_id}"}, repo_id
def chatbot_respond(message, history, repo_id, gemini_key):
    """Send a user message to Gemini and append the exchange to the history.

    `history` uses the gr.Chatbot(type="messages") format: a list of
    {"role": ..., "content": ...} dicts. (The previous tuple-style appends
    were incompatible with that component configuration declared in the UI.)

    If the model issues a function call, the matching Space logs are fetched
    and returned to the model before extracting its final text reply.
    """
    global chat
    if chat is None:
        history.append({"role": "assistant", "content": "Error: chat not initialized."})
        return history
    resp = chat.send_message(message)
    part = resp.candidates[0].content.parts[0]
    if part.function_call:
        fn = part.function_call
        # NOTE: fn.args is already a mapping in the google-genai SDK, so the
        # former json.loads(fn.args) would fail; the parsed args were also
        # never used — only the tool name matters here.
        level = "build" if fn.name == "get_build_logs" else "run"
        logs = fetch_logs(repo_id, level)
        # Answer the function call with a function_response Part — the
        # documented google-genai way (send_message("", function_response=...)
        # is not a supported signature).
        resp2 = chat.send_message(
            types.Part.from_function_response(name=fn.name, response={"result": logs})
        )
        reply = resp2.candidates[0].content.parts[0].text
    else:
        reply = part.text
    history.append({"role": "user", "content": message})
    history.append({"role": "assistant", "content": reply})
    return history
# --- UI ---
# Gradio app: HF OAuth login, sandbox setup column, and a chat column wired
# to chatbot_respond().
with gr.Blocks() as demo:
    gr.Markdown("# HF Code Sandbox Chat")
    # login row
    login_btn = gr.LoginButton("Sign in with HF", variant="huggingface")
    login_status = gr.Markdown("*Not signed in.*")
    models_md = gr.Markdown()
    # NOTE(review): `show_profile` and `list_private_models` are not defined
    # anywhere in this file — these handlers will raise NameError when the
    # button is clicked unless they exist elsewhere. Also, gr.LoginButton
    # does not appear to expose a `.token` attribute; Gradio normally injects
    # OAuth data via gr.OAuthProfile / gr.OAuthToken type annotations on the
    # handler — verify against the Gradio version in use.
    login_btn.click(lambda p: show_profile(p), inputs=[login_btn], outputs=[login_status])
    login_btn.click(lambda p, t: list_private_models(p,t), inputs=[login_btn, login_btn.token], outputs=[models_md])
    with gr.Row():
        with gr.Column(scale=2):
            # Sandbox setup: collects the Gemini key, Space name, and SDK,
            # then calls init_chat() which fills `init_status`/`repo_store`.
            gr.Markdown("## Setup Sandbox")
            gemini_key = gr.Textbox(label="Gemini API Key", type="password")
            repo_name = gr.Textbox(label="Space Name")
            sdk = gr.Radio(choices=["gradio","streamlit"], label="SDK", value="gradio")
            init_btn = gr.Button("Initialize Sandbox")
            init_status = gr.JSON()
            # Holds the created repo_id so chatbot_respond can fetch logs.
            repo_store = gr.State("")
            # NOTE(review): same `login_btn.token` concern as above applies here.
            init_btn.click(init_chat, inputs=[repo_name, sdk, gemini_key, login_btn, login_btn.token], outputs=[init_status, repo_store])
        with gr.Column(scale=8):
            # type="messages" expects {"role", "content"} dicts from handlers.
            chatbot = gr.Chatbot(type="messages")
            user_input = gr.Textbox(show_label=False, placeholder="Ask to write/debug code...")
            user_input.submit(chatbot_respond, inputs=[user_input, chatbot, repo_store, gemini_key], outputs=[chatbot])
if __name__ == "__main__":
    demo.launch()