wuhp committed (verified)
Commit 10a9edd · 1 Parent(s): c44aed0

Update app.py

Files changed (1)
  1. app.py +52 -36
app.py CHANGED
@@ -6,15 +6,30 @@ from huggingface_hub.utils import build_hf_headers, get_session, hf_raise_for_st
 from google import genai
 from google.genai import types
 
-# --- Globals ---
-client = None
-chat = None
+# --- USER INFO & MODEL LISTING ---
+
+def show_profile(profile: gr.OAuthProfile | None) -> str:
+    if profile is None:
+        return "*Not signed in.*"
+    return f"✅ Signed in as **{profile.username}**"
+
+def list_private_models(profile: gr.OAuthProfile | None, oauth_token: gr.OAuthToken | None) -> str:
+    if profile is None or oauth_token is None:
+        return "Please sign in to HF."
+    models = list_models(author=profile.username, token=oauth_token.token)
+    if not models:
+        return "No models found."
+    return "Models:\n" + "\n".join(f"- {m.id} ({'private' if m.private else 'public'})" for m in models)
+
+# --- GLOBALS ---
+client: genai.Client | None = None
+chat: genai.chats.Chat | None = None
 
 # --- System instruction for Gemini ---
 system_instruction = (
     "You are a helpful assistant that writes, debugs, and pushes code to Hugging Face Spaces. "
-    "Treat Spaces as a sandbox: create, upload, debug. "
-    "Use function calling for logs and respond in JSON {success, data, message}."
+    "Treat Spaces as a sandbox: create spaces, upload code, and debug via function calling. "
+    "Always respond in JSON with {'success','data','message'}."
 )
 
 # --- Function declarations for logs ---
@@ -31,48 +46,46 @@ get_container_logs_decl = {
 tools = [ types.Tool(function_declarations=[get_build_logs_decl, get_container_logs_decl]) ]
 
 # --- HF helpers ---
-def create_space_backend(username, hf_token, repo_name, sdk):
+def create_space_backend(username: str, hf_token: str, repo_name: str, sdk: str) -> str:
     repo_id = f"{username}/{repo_name}"
     create_repo(repo_id=repo_id, token=hf_token, exist_ok=True, repo_type="space", space_sdk=sdk)
     return repo_id
 
-def fetch_logs(repo_id, level):
+def fetch_logs(repo_id: str, level: str) -> list[dict]:
     jwt_url = f"{constants.ENDPOINT}/api/spaces/{repo_id}/jwt"
     r = get_session().get(jwt_url, headers=build_hf_headers())
     hf_raise_for_status(r)
     jwt = r.json()["token"]
-    url = f"https://api.hf.space/v1/{repo_id}/logs/{level}"
-    lines=[]
-    with get_session().get(url, headers=build_hf_headers(token=jwt), stream=True) as resp:
+    logs_url = f"https://api.hf.space/v1/{repo_id}/logs/{level}"
+    records = []
+    with get_session().get(logs_url, headers=build_hf_headers(token=jwt), stream=True) as resp:
         hf_raise_for_status(resp)
         for raw in resp.iter_lines():
             if raw.startswith(b"data: "):
                 try:
-                    ev=json.loads(raw[len(b"data: "):].decode())
-                    lines.append({"timestamp":ev.get("timestamp"),"message":ev.get("data")})
-                except: pass
-    return lines
+                    ev = json.loads(raw[len(b"data: "):].decode())
+                    records.append({"timestamp": ev.get("timestamp"), "message": ev.get("data")})
+                except:
+                    pass
+    return records
 
 # --- Chat init & respond ---
-def init_chat(repo_name, sdk, gemini_key, hf_profile, hf_token):
+def init_chat(repo_name: str, sdk: str, gemini_key: str, hf_profile: gr.OAuthProfile, hf_token: gr.OAuthToken):
     global client, chat
-    # Validate
     if hf_profile is None or hf_token is None:
-        return {"success":False,"data":None,"message":"Please sign in with HF."}, ""
+        return {"success": False, "data": None, "message": "Please sign in with Hugging Face."}, ""
     if not gemini_key:
-        return {"success":False,"data":None,"message":"Missing Gemini API key."}, ""
-    # create space
+        return {"success": False, "data": None, "message": "Missing Gemini API key."}, ""
     repo_id = create_space_backend(hf_profile.username, hf_token.token, repo_name, sdk)
     os.environ["HF_TOKEN"] = hf_token.token
-    # init Gemini
     client = genai.Client(api_key=gemini_key)
     chat = client.chats.create(
         model="gemini-2.5-flash-preview-04-17",
         config=types.GenerateContentConfig(system_instruction=system_instruction, tools=tools, temperature=0)
     )
-    return {"success":True,"data":None,"message":f"Sandbox ready: {repo_id}"}, repo_id
+    return {"success": True, "data": None, "message": f"Sandbox ready: {repo_id}"}, repo_id
 
-def chatbot_respond(message, history, repo_id, gemini_key):
+def chatbot_respond(message: str, history: list, repo_id: str, gemini_key: str):
     global chat
     if chat is None:
         history.append((None, "Error: chat not initialized."))
@@ -80,40 +93,43 @@ def chatbot_respond(message, history, repo_id, gemini_key):
     resp = chat.send_message(message)
     part = resp.candidates[0].content.parts[0]
     if part.function_call:
-        fn=part.function_call
-        args=json.loads(fn.args)
-        level = "build" if fn.name=="get_build_logs" else "run"
-        logs=fetch_logs(repo_id, level)
-        resp2 = chat.send_message("", function_response={fn.name:logs})
-        reply=resp2.candidates[0].content.parts[0].text
+        fn = part.function_call
+        args = json.loads(fn.args)
+        level = "build" if fn.name == "get_build_logs" else "run"
+        logs = fetch_logs(repo_id, level)
+        resp2 = chat.send_message("", function_response={fn.name: logs})
+        reply = resp2.candidates[0].content.parts[0].text
     else:
-        reply=part.text
+        reply = part.text
     history.append((message, reply))
     return history
 
 # --- UI ---
 with gr.Blocks() as demo:
     gr.Markdown("# HF Code Sandbox Chat")
-    # login
+
+    # --- Hugging Face Login ---
     login_btn = gr.LoginButton("Sign in with HF", variant="huggingface")
-    login_status = gr.Markdown("*Not signed in.*")
+    status_md = gr.Markdown("*Not signed in.*")
     models_md = gr.Markdown()
-    login_btn.click(lambda p: show_profile(p), inputs=[login_btn], outputs=[login_status])
-    login_btn.click(lambda p, t: list_private_models(p,t), inputs=[login_btn, login_btn.token], outputs=[models_md])
+    login_btn.click(show_profile, inputs=[login_btn], outputs=[status_md])
+    login_btn.click(list_private_models, inputs=[login_btn, login_btn.token], outputs=[models_md])
 
+    # --- Layout ---
     with gr.Row():
         with gr.Column(scale=2):
-            gr.Markdown("## Setup Sandbox")
+            gr.Markdown("### Setup Sandbox")
             gemini_key = gr.Textbox(label="Gemini API Key", type="password")
             repo_name = gr.Textbox(label="Space Name")
-            sdk = gr.Radio(choices=["gradio","streamlit"], label="SDK", value="gradio")
+            sdk = gr.Radio(choices=["gradio", "streamlit"], label="SDK", value="gradio")
            init_btn = gr.Button("Initialize Sandbox")
            init_status = gr.JSON()
            repo_store = gr.State("")
            init_btn.click(init_chat, inputs=[repo_name, sdk, gemini_key, login_btn, login_btn.token], outputs=[init_status, repo_store])
+
        with gr.Column(scale=8):
            chatbot = gr.Chatbot(type="messages")
-            user_input = gr.Textbox(show_label=False, placeholder="Ask to write/debug code...")
+            user_input = gr.Textbox(show_label=False, placeholder="Ask the sandbox to write/debug code...")
            user_input.submit(chatbot_respond, inputs=[user_input, chatbot, repo_store, gemini_key], outputs=[chatbot])
 
 if __name__ == "__main__":
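
Note on the updated chatbot_respond: a minimal sketch of how the function-call result could be fed back to Gemini with the google-genai SDK's types.Part.from_function_response helper. It assumes the fetch_logs helper and chat object defined in app.py above; the helper name reply_with_logs and the {"logs": ...} payload are illustrative, not part of this commit.

from google.genai import types

def reply_with_logs(chat, part, repo_id):
    # In recent google-genai releases, part.function_call.args arrives as a dict,
    # so no json.loads step is needed here.
    fn = part.function_call
    level = "build" if fn.name == "get_build_logs" else "run"
    logs = fetch_logs(repo_id, level)  # fetch_logs as defined in app.py
    # Wrap the tool output in a function-response Part and send it back on the same chat
    tool_part = types.Part.from_function_response(name=fn.name, response={"logs": logs})
    follow_up = chat.send_message(tool_part)
    return follow_up.candidates[0].content.parts[0].text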