wuhp committed on
Commit
2b6e504
·
verified ·
1 Parent(s): b4a48ef

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +78 -54
app.py CHANGED
@@ -1,41 +1,48 @@
1
  import os
2
  import json
3
  import uuid
4
- import gradio as gr
5
 
 
 
6
  from google import genai
7
  from google.genai import types
8
  from google.genai.types import Tool, GoogleSearch
9
- from huggingface_hub import create_repo, HfApi
10
 
11
- # ——— Configuration ———
12
- MODEL_ID = "gemini-2.5-flash-preview-04-17"
13
- WORKSPACE_DIR = "workspace"
14
- SYSTEM_INSTRUCTION = (
 
 
15
  "You are a helpful coding assistant that scaffolds a complete Hugging Face Space app. "
16
  "Based on the user's request, decide between Gradio or Streamlit (whichever fits best), "
17
  "and respond with exactly one JSON object with keys:\n"
18
  " • \"framework\": either \"gradio\" or \"streamlit\"\n"
19
  " • \"files\": a map of relative file paths to file contents\n"
20
- " • \"message\": a human-readable summary\n"
21
  "Do not include extra text or markdown."
22
  )
23
 
24
- # In-memory session store: maps session IDs to state dicts
25
- state_store: dict[str, dict] = {}
 
 
 
 
26
 
 
 
27
 
28
- def start_app(gemini_key: str, hf_token: str, hf_username: str, repo_name: str) -> dict:
29
- """Initialises a new chat session + local workspace."""
30
  os.makedirs(WORKSPACE_DIR, exist_ok=True)
31
 
32
- # Gemini chat client
33
  client = genai.Client(api_key=gemini_key)
34
  config = types.GenerateContentConfig(system_instruction=SYSTEM_INSTRUCTION)
35
  tools = [Tool(google_search=GoogleSearch())]
36
  chat = client.chats.create(model=MODEL_ID, config=config, tools=tools)
37
 
38
- # per‑project workspace (persisted so we can push snapshots)
39
  local_path = os.path.join(WORKSPACE_DIR, repo_name)
40
  os.makedirs(local_path, exist_ok=True)
41
 
@@ -48,35 +55,38 @@ def start_app(gemini_key: str, hf_token: str, hf_username: str, repo_name: str)
48
  "repo_id": None,
49
  "local_path": local_path,
50
  "embed_url": None,
51
- "logs": [f"Initialized workspace at {WORKSPACE_DIR}/{repo_name}."],
52
  }
53
 
54
 
55
- def handle_message(user_msg: str, state: dict):
56
- """Send user message to Gemini, scaffold / update the Space and return reply text."""
57
 
58
  chat = state["chat"]
59
  logs = state.setdefault("logs", [])
60
 
61
- logs.append(f"> User: {user_msg}")
62
  resp = chat.send_message(user_msg)
63
  logs.append("Received response from Gemini.")
64
 
 
 
 
65
  try:
66
  data = json.loads(resp.text)
67
  framework: str = data["framework"]
68
- files: dict[str, str] = data.get("files", {})
69
  reply_msg: str = data.get("message", "")
70
  except Exception:
71
- logs.append("⚠️ Failed to parse assistant JSON." + "\n" + resp.text)
72
  return "⚠️ Parsing error. Check logs.", state
73
 
74
  # ---------------------------------------------------------------------
75
- # Create the Space on first run
76
  # ---------------------------------------------------------------------
77
  if not state["created"]:
78
  full_repo = f"{state['hf_username']}/{state['repo_name']}"
79
- logs.append(f"Creating HF Space '{full_repo}' (template '{framework}') …")
80
  create_repo(
81
  repo_id=full_repo,
82
  token=state["hf_token"],
@@ -91,22 +101,21 @@ def handle_message(user_msg: str, state: dict):
91
  })
92
 
93
  # ---------------------------------------------------------------------
94
- # Write/overwrite files returned by Gemini
95
  # ---------------------------------------------------------------------
96
  if files:
97
  logs.append(f"Writing {len(files)} file(s): {list(files)}")
98
  for relpath, content in files.items():
99
  dest = os.path.join(state["local_path"], relpath)
100
  os.makedirs(os.path.dirname(dest), exist_ok=True)
101
- with open(dest, "w", encoding="utf-8") as f:
102
- f.write(content)
103
 
104
  # ---------------------------------------------------------------------
105
- # Commit the snapshot to the Space
106
  # ---------------------------------------------------------------------
107
  logs.append("Uploading snapshot to Hugging Face …")
108
- api = HfApi(token=state["hf_token"])
109
- api.upload_folder(
110
  folder_path=state["local_path"],
111
  repo_id=state["repo_id"],
112
  repo_type="space",
@@ -115,53 +124,69 @@ def handle_message(user_msg: str, state: dict):
115
 
116
  return reply_msg, state
117
 
118
-
119
- # ——— Gradio UI ———
 
120
  with gr.Blocks(title="Gemini → HF Space scaffolder") as demo:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
121
  with gr.Row():
122
- # -----------------------------------------------------------------
123
- # Left column – credentials & new‑app form
124
- # -----------------------------------------------------------------
125
  with gr.Column(scale=1):
126
  gemini_key = gr.Textbox(label="Gemini API Key", type="password")
127
- hf_token = gr.Textbox(label="Hugging Face Token", type="password")
128
  hf_user = gr.Textbox(label="HF Username")
129
  repo_name = gr.Textbox(label="New App (repo) name")
130
  session_id = gr.Textbox(value="", visible=False)
131
  start_btn = gr.Button("Start a new app")
132
 
133
- # -----------------------------------------------------------------
134
- # Right column – chat & live preview
135
- # -----------------------------------------------------------------
136
  with gr.Column(scale=3):
137
  chatbot = gr.Chatbot(type="messages")
138
  logs_display = gr.Textbox(label="Operation Logs", interactive=False, lines=8)
139
  preview_iframe = gr.HTML("<p>No deployed app yet.</p>")
140
 
141
  user_msg = gr.Textbox(label="Your message")
142
- send_btn = gr.Button("Send", interactive=False) # <— disabled until a session exists
 
 
 
 
 
 
 
143
 
144
- # --------------- Callbacks ---------------
145
- def on_start(g_key, h_token, h_user, r_name):
146
  new_id = str(uuid.uuid4())
147
- state_store[new_id] = start_app(g_key, h_token, h_user, r_name)
148
  logs = "\n".join(state_store[new_id]["logs"])
149
- return (
150
- new_id, # session_id (hidden)
151
- logs, # logs_display
152
- "<p>Awaiting first instruction…</p>", # preview_iframe
153
- gr.update(interactive=True), # enable send button
154
- )
155
 
156
  start_btn.click(
157
  on_start,
158
- inputs=[gemini_key, hf_token, hf_user, repo_name],
159
  outputs=[session_id, logs_display, preview_iframe, send_btn],
160
  )
161
 
162
- def on_send(msg, chat_history, sess_id):
 
 
 
163
  if not sess_id or sess_id not in state_store:
164
- err = "Error: No session found. Please start a new app first."
165
  return chat_history + [("", err)], sess_id, "", ""
166
 
167
  reply, new_state = handle_message(msg, state_store[sess_id])
@@ -171,8 +196,7 @@ with gr.Blocks(title="Gemini → HF Space scaffolder") as demo:
171
  logs = "\n".join(new_state["logs"])
172
  embed = (
173
  f'<iframe src="{new_state["embed_url"]}" width="100%" height="500px"></iframe>'
174
- if new_state.get("embed_url")
175
- else ""
176
  )
177
  return chat_history, sess_id, logs, embed
178
 
@@ -187,8 +211,8 @@ with gr.Blocks(title="Gemini → HF Space scaffolder") as demo:
187
  outputs=[chatbot, session_id, logs_display, preview_iframe],
188
  )
189
 
190
- # -------------------------------------------------------------------------
191
- # Run the Gradio app – expose on default 7860 unless HF Space overrides it
192
- # -------------------------------------------------------------------------
193
  if __name__ == "__main__":
194
  demo.launch()
 
1
  import os
2
  import json
3
  import uuid
4
+ from typing import Any, Dict, Tuple
5
 
6
+ import gradio as gr
7
+ from huggingface_hub import create_repo, HfApi
8
  from google import genai
9
  from google.genai import types
10
  from google.genai.types import Tool, GoogleSearch
 
11
 
12
# -----------------------------------------------------------------------------
# Configuration
# -----------------------------------------------------------------------------
# Gemini model used for every chat session created by this app.
MODEL_ID: str = "gemini-2.5-flash-preview-04-17"
# Root folder under which each scaffolded app gets its own local workspace.
WORKSPACE_DIR: str = "workspace"
# System prompt: instructs Gemini to reply with exactly one JSON object whose
# keys ("framework", "files", "message") the rest of the app parses verbatim.
# Fixed: "humanreadable" had lost its hyphen (unicode-dash stripping artifact),
# degrading the prompt wording sent to the model.
SYSTEM_INSTRUCTION: str = (
    "You are a helpful coding assistant that scaffolds a complete Hugging Face Space app. "
    "Based on the user's request, decide between Gradio or Streamlit (whichever fits best), "
    "and respond with exactly one JSON object with keys:\n"
    " • \"framework\": either \"gradio\" or \"streamlit\"\n"
    " • \"files\": a map of relative file paths to file contents\n"
    " • \"message\": a human-readable summary\n"
    "Do not include extra text or markdown."
)

# In-memory session store: maps session IDs -> state dicts
# (per-process only; sessions are lost on restart).
state_store: Dict[str, Dict[str, Any]] = {}
29
+
30
+ # -----------------------------------------------------------------------------
31
+ # Helper functions
32
+ # -----------------------------------------------------------------------------
33
 
34
+ def start_app(gemini_key: str, hf_token: str, hf_username: str, repo_name: str) -> Dict[str, Any]:
35
+ """Initialise chat with Gemini + create a local workspace for the new app."""
36
 
 
 
37
  os.makedirs(WORKSPACE_DIR, exist_ok=True)
38
 
39
+ # Gemini client & chat session
40
  client = genai.Client(api_key=gemini_key)
41
  config = types.GenerateContentConfig(system_instruction=SYSTEM_INSTRUCTION)
42
  tools = [Tool(google_search=GoogleSearch())]
43
  chat = client.chats.create(model=MODEL_ID, config=config, tools=tools)
44
 
45
+ # Local project folder (used as upload snapshot source)
46
  local_path = os.path.join(WORKSPACE_DIR, repo_name)
47
  os.makedirs(local_path, exist_ok=True)
48
 
 
55
  "repo_id": None,
56
  "local_path": local_path,
57
  "embed_url": None,
58
+ "logs": [f"Initialized workspace at {WORKSPACE_DIR}/{repo_name}."]
59
  }
60
 
61
 
62
+ def handle_message(user_msg: str, state: Dict[str, Any]) -> Tuple[str, Dict[str, Any]]:
63
+ """Send *user_msg* to Gemini, act on the JSON response, and return assistant reply."""
64
 
65
  chat = state["chat"]
66
  logs = state.setdefault("logs", [])
67
 
68
+ logs.append(f"> **User**: {user_msg}")
69
  resp = chat.send_message(user_msg)
70
  logs.append("Received response from Gemini.")
71
 
72
+ # ---------------------------------------------------------------------
73
+ # Parse Gemini JSON answer
74
+ # ---------------------------------------------------------------------
75
  try:
76
  data = json.loads(resp.text)
77
  framework: str = data["framework"]
78
+ files: Dict[str, str] = data.get("files", {})
79
  reply_msg: str = data.get("message", "")
80
  except Exception:
81
+ logs.append("⚠️ Failed to parse assistant JSON.\n" + resp.text)
82
  return "⚠️ Parsing error. Check logs.", state
83
 
84
  # ---------------------------------------------------------------------
85
+ # Create the target Space on first run
86
  # ---------------------------------------------------------------------
87
  if not state["created"]:
88
  full_repo = f"{state['hf_username']}/{state['repo_name']}"
89
+ logs.append(f"Creating HF Space **{full_repo}** (template '{framework}') …")
90
  create_repo(
91
  repo_id=full_repo,
92
  token=state["hf_token"],
 
101
  })
102
 
103
  # ---------------------------------------------------------------------
104
+ # Write / overwrite files
105
  # ---------------------------------------------------------------------
106
  if files:
107
  logs.append(f"Writing {len(files)} file(s): {list(files)}")
108
  for relpath, content in files.items():
109
  dest = os.path.join(state["local_path"], relpath)
110
  os.makedirs(os.path.dirname(dest), exist_ok=True)
111
+ with open(dest, "w", encoding="utf8") as fp:
112
+ fp.write(content)
113
 
114
  # ---------------------------------------------------------------------
115
+ # Push snapshot
116
  # ---------------------------------------------------------------------
117
  logs.append("Uploading snapshot to Hugging Face …")
118
+ HfApi(token=state["hf_token"]).upload_folder(
 
119
  folder_path=state["local_path"],
120
  repo_id=state["repo_id"],
121
  repo_type="space",
 
124
 
125
  return reply_msg, state
126
 
127
+ # -----------------------------------------------------------------------------
128
+ # Gradio UI
129
+ # -----------------------------------------------------------------------------
130
  with gr.Blocks(title="Gemini → HF Space scaffolder") as demo:
131
+ # -------------------------------------------------------------------------
132
+ # OAuth UI row (always visible)
133
+ # -------------------------------------------------------------------------
134
+ with gr.Row():
135
+ login_btn = gr.LoginButton()
136
+ logout_btn = gr.LogoutButton()
137
+ status_md = gr.Markdown("Not logged in")
138
+
139
# Callback to refresh the status text whenever the page loads OR OAuth changes.
def show_profile(profile: gr.OAuthProfile | None):  # profile is auto-injected by Gradio OAuth
    """Return a Markdown update reflecting the current OAuth login state.

    Fixed: component-class ``gr.Markdown.update(...)`` was removed in Gradio 4.x;
    the generic ``gr.update(...)`` works on both 3.x and 4.x.
    """
    if profile is None:
        return gr.update(value="*Not logged in.*")
    return gr.update(value=f"Logged in as **{profile.username}**")
144
+
145
+ demo.load(show_profile, inputs=None, outputs=status_md)
146
+
147
+ # -------------------------------------------------------------------------
148
+ # Main app controls (hidden behind OAuth)
149
+ # -------------------------------------------------------------------------
150
  with gr.Row():
 
 
 
151
  with gr.Column(scale=1):
152
  gemini_key = gr.Textbox(label="Gemini API Key", type="password")
 
153
  hf_user = gr.Textbox(label="HF Username")
154
  repo_name = gr.Textbox(label="New App (repo) name")
155
  session_id = gr.Textbox(value="", visible=False)
156
  start_btn = gr.Button("Start a new app")
157
 
 
 
 
158
  with gr.Column(scale=3):
159
  chatbot = gr.Chatbot(type="messages")
160
  logs_display = gr.Textbox(label="Operation Logs", interactive=False, lines=8)
161
  preview_iframe = gr.HTML("<p>No deployed app yet.</p>")
162
 
163
  user_msg = gr.Textbox(label="Your message")
164
+ send_btn = gr.Button("Send", interactive=False)
165
+
166
# ---------------------------------------------------------------------
# Callback: start a new app (requires OAuth token)
# ---------------------------------------------------------------------
def on_start(g_key: str, h_user: str, r_name: str, oauth_token: gr.OAuthToken | None):
    """Create a fresh session for a new app.

    Returns (session_id, logs text, preview HTML, send-button update) —
    matching ``outputs=[session_id, logs_display, preview_iframe, send_btn]``.
    Raises ``gr.Error`` when the user has not signed in with Hugging Face.
    """
    if oauth_token is None:
        # Fixed: gr.Error is an exception and must be *raised* to surface a
        # toast in the UI. Returning it (as before) leaked the exception object
        # into the hidden session_id textbox and mismatched the output contract.
        raise gr.Error("Please *Sign in with Hugging Face* first.")

    new_id = str(uuid.uuid4())
    # The OAuth token (not a pasted HF token) is forwarded to start_app.
    state_store[new_id] = start_app(g_key, oauth_token.token, h_user, r_name)
    logs = "\n".join(state_store[new_id]["logs"])
    return new_id, logs, "<p>Awaiting first instruction…</p>", gr.update(interactive=True)
 
 
 
 
 
177
 
178
  start_btn.click(
179
  on_start,
180
+ inputs=[gemini_key, hf_user, repo_name],
181
  outputs=[session_id, logs_display, preview_iframe, send_btn],
182
  )
183
 
184
+ # ---------------------------------------------------------------------
185
+ # Callback: send a chat message
186
+ # ---------------------------------------------------------------------
187
+ def on_send(msg: str, chat_history: list[list[str]], sess_id: str):
188
  if not sess_id or sess_id not in state_store:
189
+ err = "Error: No active session. Click *Start a new app* first."
190
  return chat_history + [("", err)], sess_id, "", ""
191
 
192
  reply, new_state = handle_message(msg, state_store[sess_id])
 
196
  logs = "\n".join(new_state["logs"])
197
  embed = (
198
  f'<iframe src="{new_state["embed_url"]}" width="100%" height="500px"></iframe>'
199
+ if new_state.get("embed_url") else ""
 
200
  )
201
  return chat_history, sess_id, logs, embed
202
 
 
211
  outputs=[chatbot, session_id, logs_display, preview_iframe],
212
  )
213
 
214
+ # -----------------------------------------------------------------------------
215
+ # Launch the Gradio app
216
+ # -----------------------------------------------------------------------------
217
  if __name__ == "__main__":
218
  demo.launch()