Spaces:
Sleeping
Sleeping
File size: 5,934 Bytes
f2231db 6146397 c44aed0 f2231db 6146397 f2231db 10a9edd 2e9c203 c44aed0 f2231db 10a9edd f2231db 9194337 c44aed0 f2231db 2e9c203 f2231db 2e9c203 f2231db c44aed0 f2231db c44aed0 10a9edd c44aed0 10a9edd 6146397 10a9edd 6146397 c44aed0 10a9edd f2231db c44aed0 10a9edd 2e9c203 10a9edd c44aed0 10a9edd 2e9c203 c44aed0 f2231db c44aed0 9194337 10a9edd 2e9c203 10a9edd d000f0a 2e9c203 c44aed0 f2231db 10a9edd f2231db 10a9edd f2231db c44aed0 10a9edd c44aed0 10a9edd c44aed0 10a9edd f2231db 10a9edd f2231db 10a9edd c44aed0 10a9edd c44aed0 d000f0a c44aed0 10a9edd f2231db c44aed0 10a9edd c44aed0 9194337 8e77934 45e9fba |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 |
import os
import json
import gradio as gr
from huggingface_hub import create_repo, list_models, upload_file, constants
from huggingface_hub.utils import build_hf_headers, get_session, hf_raise_for_status
from google import genai
from google.genai import types
# — USER INFO & MODEL LISTING —
def show_profile(profile: gr.OAuthProfile | None) -> str:
    """Render a one-line markdown status for the current HF login."""
    if profile is not None:
        return f"✅ Signed in as **{profile.username}**"
    return "*Not signed in.*"
def list_private_models(profile: gr.OAuthProfile | None, oauth_token: gr.OAuthToken | None) -> str:
    """Return the signed-in user's models as a markdown bullet list.

    Falls back to a sign-in prompt when either the profile or token is missing.
    """
    if profile is None or oauth_token is None:
        return "Please sign in to HF."
    # list_models returns an iterator in current huggingface_hub; materialize it
    # so the emptiness check works (a generator object is always truthy).
    models = list(list_models(author=profile.username, token=oauth_token.token))
    if not models:
        return "No models found."
    return "Models:\n" + "\n".join(
        f"- {m.id} ({'private' if m.private else 'public'})" for m in models
    )
# --- GLOBALS ---
# Module-level handles for the Gemini client and the active chat session.
# Written by init_chat(); read by chatbot_respond().
client: genai.Client | None = None
chat: genai.chats.Chat | None = None
# --- System instruction for Gemini ---
# Steers the model toward sandbox/tool usage and a fixed JSON reply envelope.
system_instruction = (
    "You are a helpful assistant that writes, debugs, and pushes code to Hugging Face Spaces. "
    "Treat Spaces as a sandbox: create spaces, upload code, and debug via function calling. "
    "Always respond in JSON with {'success','data','message'}."
)
# --- Function declarations for logs ---
# JSON-schema-style tool declarations surfaced to Gemini via function calling.
# Both take a single required string parameter, repo_id.
get_build_logs_decl = {
    "name": "get_build_logs",
    "description": "Fetches build logs for a Space",
    "parameters": {"type":"object","properties":{"repo_id":{"type":"string"}},"required":["repo_id"]}
}
get_container_logs_decl = {
    "name": "get_container_logs",
    "description": "Fetches container logs for a Space",
    "parameters": {"type":"object","properties":{"repo_id":{"type":"string"}},"required":["repo_id"]}
}
# Wrapped once so init_chat() can hand them to GenerateContentConfig.
tools = [ types.Tool(function_declarations=[get_build_logs_decl, get_container_logs_decl]) ]
# --- HF helpers ---
def create_space_backend(username: str, hf_token: str, repo_name: str, sdk: str) -> str:
    """Create (or reuse) a Hugging Face Space repo and return its full id."""
    repo_id = "/".join((username, repo_name))
    create_repo(
        repo_id=repo_id,
        token=hf_token,
        repo_type="space",
        space_sdk=sdk,
        exist_ok=True,
    )
    return repo_id
def fetch_logs(repo_id: str, level: str) -> list[dict]:
    """Stream SSE log events for a Space and collect them into dicts.

    level is the log channel path segment (e.g. "build" or "run").
    Each record carries 'timestamp' and 'message' keys; malformed SSE
    payloads are skipped.
    """
    # Exchange the ambient HF credentials for a short-lived Space JWT.
    jwt_url = f"{constants.ENDPOINT}/api/spaces/{repo_id}/jwt"
    r = get_session().get(jwt_url, headers=build_hf_headers())
    hf_raise_for_status(r)
    jwt = r.json()["token"]
    logs_url = f"https://api.hf.space/v1/{repo_id}/logs/{level}"
    records: list[dict] = []
    prefix = b"data: "
    with get_session().get(logs_url, headers=build_hf_headers(token=jwt), stream=True) as resp:
        hf_raise_for_status(resp)
        for raw in resp.iter_lines():
            if not raw.startswith(prefix):
                continue
            try:
                ev = json.loads(raw[len(prefix):].decode())
            except (json.JSONDecodeError, UnicodeDecodeError):
                # Skip unparseable events only — the original bare `except`
                # silently swallowed every exception, hiding real failures.
                continue
            records.append({"timestamp": ev.get("timestamp"), "message": ev.get("data")})
    return records
# --- Chat init & respond ---
def init_chat(repo_name: str, sdk: str, gemini_key: str, hf_profile: gr.OAuthProfile, hf_token: gr.OAuthToken):
    """Provision the Space sandbox and open a tool-enabled Gemini chat.

    Returns a (status_dict, repo_id) pair; repo_id is "" when setup fails.
    """
    global client, chat

    def _fail(msg: str):
        # Uniform failure payload matching the app's JSON envelope.
        return {"success": False, "data": None, "message": msg}, ""

    if hf_profile is None or hf_token is None:
        return _fail("Please sign in with Hugging Face.")
    if not gemini_key:
        return _fail("Missing Gemini API key.")

    repo_id = create_space_backend(hf_profile.username, hf_token.token, repo_name, sdk)
    os.environ["HF_TOKEN"] = hf_token.token

    client = genai.Client(api_key=gemini_key)
    chat = client.chats.create(
        model="gemini-2.5-flash-preview-04-17",
        config=types.GenerateContentConfig(
            system_instruction=system_instruction,
            tools=tools,
            temperature=0,
        ),
    )
    return {"success": True, "data": None, "message": f"Sandbox ready: {repo_id}"}, repo_id
def chatbot_respond(message: str, history: list, repo_id: str, gemini_key: str):
    """Send a user message to the Gemini chat, dispatching log-fetch tool calls.

    Appends the exchange to `history` using the {'role', 'content'} dict
    format required by gr.Chatbot(type="messages") — the previous tuple
    format is only valid for the legacy tuples mode — and returns it.
    """
    global chat
    if chat is None:
        history.append({"role": "assistant", "content": "Error: chat not initialized."})
        return history
    resp = chat.send_message(message)
    part = resp.candidates[0].content.parts[0]
    if part.function_call:
        fn = part.function_call
        # fn.args is already a mapping in the google-genai SDK — calling
        # json.loads on it raised TypeError (and the result was never used).
        level = "build" if fn.name == "get_build_logs" else "run"
        logs = fetch_logs(repo_id, level)
        # Return the tool output to the model as a proper function-response
        # part; send_message has no `function_response` keyword.
        resp2 = chat.send_message(
            types.Part.from_function_response(name=fn.name, response={"result": logs})
        )
        reply = resp2.candidates[0].content.parts[0].text
    else:
        reply = part.text
    history.append({"role": "user", "content": message})
    history.append({"role": "assistant", "content": reply})
    return history
# --- UI ---
with gr.Blocks() as demo:
    gr.Markdown("# HF Code Sandbox Chat")
    # --- Hugging Face Login ---
    login_btn = gr.LoginButton("Sign in with HF", variant="huggingface")
    status_md = gr.Markdown("*Not signed in.*")
    models_md = gr.Markdown()
    # NOTE(review): passing login_btn as an event input and reading
    # login_btn.token is unusual — recent Gradio versions inject
    # gr.OAuthProfile / gr.OAuthToken automatically based on the handler's
    # parameter annotations, and LoginButton may not expose a .token
    # attribute. Verify against the installed Gradio version.
    login_btn.click(show_profile, inputs=[login_btn], outputs=[status_md])
    login_btn.click(list_private_models, inputs=[login_btn, login_btn.token], outputs=[models_md])
    # --- Layout ---
    with gr.Row():
        with gr.Column(scale=2):
            gr.Markdown("### Setup Sandbox")
            gemini_key = gr.Textbox(label="Gemini API Key", type="password")
            repo_name = gr.Textbox(label="Space Name")
            sdk = gr.Radio(choices=["gradio", "streamlit"], label="SDK", value="gradio")
            init_btn = gr.Button("Initialize Sandbox")
            init_status = gr.JSON()
            # Holds the repo_id returned by init_chat for later chat turns.
            repo_store = gr.State("")
            init_btn.click(init_chat, inputs=[repo_name, sdk, gemini_key, login_btn, login_btn.token], outputs=[init_status, repo_store])
        with gr.Column(scale=8):
            # type="messages" expects {'role','content'} dicts from handlers.
            chatbot = gr.Chatbot(type="messages")
            user_input = gr.Textbox(show_label=False, placeholder="Ask the sandbox to write/debug code...")
            user_input.submit(chatbot_respond, inputs=[user_input, chatbot, repo_store, gemini_key], outputs=[chatbot])
if __name__ == "__main__":
    demo.launch()
|