Update app.py

app.py CHANGED
@@ -114,7 +114,6 @@ def configure_gemini(api_key: str | None, model_name: str | None) -> str:
         return "Please select a Gemini model."
     try:
         genai.configure(api_key=api_key)
-        # Test using the selected model
         genai.GenerativeModel(model_name).generate_content("ping")
         return f"Gemini configured successfully with **{model_name}**."
     except Exception as e:
@@ -132,7 +131,7 @@ def call_gemini(prompt: str, api_key: str, model_name: str) -> str:
         return f"Error calling Gemini API with {model_name}: {e}"


-# --- AI workflow logic
+# --- AI workflow logic ---

 def ai_workflow_chat(
     message: str,
@@ -155,7 +154,6 @@ def ai_workflow_chat(
     str,
     str
 ]:
-    # Append user message
     history.append([message, None])
     bot_message = ""
     new_repo_id = repo_id_state
@@ -164,13 +162,98 @@ def ai_workflow_chat(
     updated_container = container_logs
     updated_build = build_logs

-
-
-
-
+    try:
+        if not hf_profile or not hf_token:
+            bot_message = "Please log in to Hugging Face first."
+            new_workflow = "awaiting_login"
+        elif not gemini_api_key or not gemini_model:
+            bot_message = "Please enter your API key and select a Gemini model."
+            new_workflow = "awaiting_api_key"
+        elif (new_workflow == "idle" or "create" in message.lower()) and not new_repo_id:
+            bot_message = "What should the Space be called? (e.g., `my-awesome-app`)"
+            new_workflow = "awaiting_repo_name"
+        elif new_workflow == "awaiting_repo_name":
+            repo_name = message.strip()
+            if not repo_name:
+                bot_message = "Please provide a valid Space name."
+            else:
+                bot_message = f"Creating Space `{hf_profile.username}/{repo_name}`..."
+                new_repo_id, iframe_html = create_space_action(repo_name, space_sdk, hf_profile, hf_token)
+                updated_preview = iframe_html
+                bot_message += "\n✅ Space created."
+                new_workflow = "awaiting_app_description"
+        elif new_workflow in ("awaiting_app_description", "debugging"):
+            if new_workflow == "awaiting_app_description":
+                app_desc = message
+                bot_message = f"Generating code for a `{space_sdk}` app based on: '{app_desc}'..."
+                prompt = f"""
+                You are an AI assistant specializing in Hugging Face Spaces using the {space_sdk} SDK.
+                Generate a full, single-file Python app based on:
+                '{app_desc}'
+                Return **only** the code block (```python ...```).
+                """
+            else:
+                debug_instr = message
+                logs = get_container_logs_action(new_repo_id, hf_profile, hf_token)
+                bot_message = f"Analyzing logs and applying fixes: '{debug_instr}'..."
+                prompt = f"""
+                You are debugging a {space_sdk} Space.
+                Logs:
+                {logs}
+                User instructions:
+                '{debug_instr}'
+                Generate a fixed, single-file Python app. Return only the ```python``` code block.
+                """
+            new_workflow = "generating_code"
+            resp = call_gemini(prompt, gemini_api_key, gemini_model)
+            start = resp.find("```python")
+            end = resp.rfind("```")
+            if start != -1 and end != -1 and end > start:
+                code = resp[start + len("```python"):end].strip()
+                bot_message += "\n✅ Code generated. Uploading..."
+                new_workflow = "uploading_code"
+                upload_log = upload_file_to_space_action(code, "app.py", new_repo_id, hf_profile, hf_token)
+                bot_message += "\n" + upload_log
+                if "✅ Uploaded" in upload_log:
+                    bot_message += "\nThe Space is now rebuilding. Say 'check logs' to fetch them."
+                    new_workflow = "awaiting_log_check"
+                    updated_preview = f'<iframe src="https://huggingface.co/spaces/{new_repo_id}" width="100%" height="500px"></iframe>'
+                else:
+                    new_workflow = "idle"
+            else:
+                bot_message += f"\n⚠️ Could not parse code from Gemini.\nResponse:\n{resp}"
+                new_workflow = "awaiting_app_description"
+        elif new_workflow == "awaiting_log_check" and "check logs" in message.lower():
+            bot_message = "Fetching logs..."
+            updated_container = get_container_logs_action(new_repo_id, hf_profile, hf_token)
+            updated_build = get_build_logs_action(new_repo_id, hf_profile, hf_token)
+            bot_message += "\n✅ Logs updated. Describe any errors or say 'generate fix'."
+            new_workflow = "reviewing_logs"
+        elif new_workflow == "reviewing_logs" and "generate fix" in message.lower():
+            latest = get_container_logs_action(new_repo_id, hf_profile, hf_token)
+            if "Error" not in latest and "Exception" not in latest:
+                bot_message = "No clear error found. What should I fix?"
+                new_workflow = "reviewing_logs"
+            else:
+                bot_message = "Generating a fix based on logs..."
+                new_workflow = "debugging"
+        elif "reset" in message.lower():
+            bot_message = "Workflow reset."
+            new_repo_id = None
+            updated_preview = "<p>No Space created yet.</p>"
+            updated_container = ""
+            updated_build = ""
+            new_workflow = "idle"
+        else:
+            bot_message = "Command not recognized. Try 'create', 'check logs', 'generate fix', or 'reset'."
+    except Exception as e:
+        bot_message = f"Unexpected error: {e}"
+        new_workflow = "idle"

-
-
+    if history and history[-1][1] is None:
+        history[-1][1] = bot_message
+    else:
+        history.append([None, bot_message])

     return history, new_repo_id, new_workflow, updated_preview, updated_container, updated_build

@@ -178,13 +261,13 @@ def ai_workflow_chat(
 # --- Build the Gradio UI ---

 with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
-    hf_profile
-    hf_token
-    gemini_key
-    gemini_model
-    repo_id
-    workflow
-    sdk_state
+    hf_profile = gr.State(None)
+    hf_token = gr.State(None)
+    gemini_key = gr.State(None)
+    gemini_model = gr.State("gemini-2.5-pro-preview-03-25")
+    repo_id = gr.State(None)
+    workflow = gr.State("idle")
+    sdk_state = gr.State("gradio")

     with gr.Row():
         # Sidebar
@@ -193,11 +276,9 @@ with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
         login_status = gr.Markdown("*Not logged in.*")
         login_btn = gr.LoginButton(variant="huggingface")

-        # init & update login status
         ai_builder_tab.load(show_profile, outputs=login_status)
         login_btn.click(show_profile, outputs=login_status)
-        login_btn.click(lambda
-            outputs=[hf_profile, hf_token])
+        login_btn.click(lambda p, t: (p, t), outputs=[hf_profile, hf_token])

         gr.Markdown("## Google AI Studio API Key")
         gemini_input = gr.Textbox(label="API Key", type="password")
@@ -215,8 +296,8 @@ with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
         )
         model_selector.change(lambda m: m, inputs=model_selector, outputs=gemini_model)

-        # configure Gemini
-
+        # configure Gemini on key or model change
+        ai_builder_tab.load(
            configure_gemini,
            inputs=[gemini_key, gemini_model],
            outputs=[gemini_status]
@@ -242,7 +323,6 @@ with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
         user_input = gr.Textbox(placeholder="Type your message…")
         send_btn = gr.Button("Send", interactive=False)

-        # enable send only when logged in & key & model selected
         ai_builder_tab.load(
             lambda p, k, m: gr.update(interactive=bool(p and k and m)),
             inputs=[hf_profile, gemini_key, gemini_model],
@@ -269,11 +349,10 @@ with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
     run_txt = gr.Textbox(label="Container Logs", lines=10, interactive=False)

     def wrap_chat(msg, history, prof, tok, key, model, rid, wf, sdk, prev, run_l, build_l):
-
+        new_hist, new_rid, new_wf, new_prev, new_run, new_build = ai_workflow_chat(
            msg, history, prof, tok, key, model, rid, wf, sdk, prev, run_l, build_l
        )
-
-        return [(u or "", v or "") for u, v in hist], new_rid, new_wf, new_prev, new_run, new_build
+        return [(u or "", v or "") for u, v in new_hist], new_rid, new_wf, new_prev, new_run, new_build

     send_btn.click(
         wrap_chat,
@@ -292,7 +371,93 @@ with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
     )

 with gr.Blocks(title="Manual Hugging Face Space Manager") as manual_control_tab:
-
+    manual_profile = gr.State(None)
+    manual_token = gr.State(None)
+    manual_repo = gr.State(None)
+
+    gr.Markdown("## Manual Sign-In & Space Management")
+    manual_login_btn = gr.LoginButton(variant="huggingface", size="lg")
+    manual_status = gr.Markdown("*Not logged in.*")
+    manual_models = gr.Markdown()
+
+    manual_control_tab.load(show_profile, outputs=manual_status)
+    manual_login_btn.click(show_profile, outputs=manual_status)
+    manual_control_tab.load(list_private_models, outputs=manual_models)
+    manual_login_btn.click(list_private_models, outputs=manual_models)
+    manual_login_btn.click(lambda p, t: (p, t), outputs=[manual_profile, manual_token])
+
+    manual_repo_name = gr.Textbox(label="New Space name", placeholder="my-space")
+    manual_sdk_sel = gr.Radio(choices=["gradio","streamlit"], value="gradio", label="Template SDK")
+    manual_create_btn = gr.Button("Create Space", interactive=False)
+    manual_create_logs = gr.Textbox(label="Create Logs", lines=3, interactive=False)
+    manual_preview = gr.HTML("<p>No Space created yet.</p>")
+
+    manual_control_tab.load(
+        lambda p, t: gr.update(interactive=bool(p and t)),
+        inputs=[manual_profile, manual_token],
+        outputs=[manual_create_btn]
+    )
+    manual_login_btn.click(
+        lambda p, t: gr.update(interactive=bool(p and t)),
+        inputs=[manual_profile, manual_token],
+        outputs=[manual_create_btn]
+    )
+
+    manual_create_btn.click(
+        create_space_action,
+        inputs=[manual_repo_name, manual_sdk_sel, manual_profile, manual_token],
+        outputs=[manual_repo, manual_preview]
+    ).then(lambda _: "", outputs=[manual_create_logs])
+
+    manual_path = gr.Textbox(label="Path in Space", value="app.py")
+    manual_file = gr.File(label="Select file")
+    manual_up_btn = gr.Button("Upload File", interactive=False)
+    manual_up_log = gr.Textbox(label="Upload Logs", lines=2, interactive=False)
+
+    manual_control_tab.load(
+        lambda rid, p, t: gr.update(interactive=bool(rid and p and t)),
+        inputs=[manual_repo, manual_profile, manual_token],
+        outputs=[manual_up_btn]
+    )
+    manual_login_btn.click(
+        lambda rid, p, t: gr.update(interactive=bool(rid and p and t)),
+        inputs=[manual_repo, manual_profile, manual_token],
+        outputs=[manual_up_btn]
+    )
+
+    manual_up_btn.click(
+        upload_file_to_space_action,
+        inputs=[manual_file, manual_path, manual_repo, manual_profile, manual_token],
+        outputs=[manual_up_log]
+    )
+
+    manual_build_btn = gr.Button("Fetch Build Logs", interactive=False)
+    manual_container_btn = gr.Button("Fetch Container Logs", interactive=False)
+    manual_build_txt = gr.Textbox(label="Build Logs", lines=10, interactive=False)
+    manual_container_txt = gr.Textbox(label="Container Logs", lines=10, interactive=False)
+
+    for btn in (manual_build_btn, manual_container_btn):
+        manual_control_tab.load(
+            lambda rid, p, t: gr.update(interactive=bool(rid and p and t)),
+            inputs=[manual_repo, manual_profile, manual_token],
+            outputs=[btn]
+        )
+        manual_login_btn.click(
+            lambda rid, p, t: gr.update(interactive=bool(rid and p and t)),
+            inputs=[manual_repo, manual_profile, manual_token],
+            outputs=[btn]
+        )
+
+    manual_build_btn.click(
+        get_build_logs_action,
+        inputs=[manual_repo, manual_profile, manual_token],
+        outputs=[manual_build_txt]
+    )
+    manual_container_btn.click(
+        get_container_logs_action,
+        inputs=[manual_repo, manual_profile, manual_token],
+        outputs=[manual_container_txt]
+    )

 demo = gr.TabbedInterface(
     [ai_builder_tab, manual_control_tab],
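
For reference, a minimal standalone sketch (not part of the commit; the helper name and sample text are illustrative) of the fenced-code extraction the new ai_workflow_chat branch applies to Gemini's reply before uploading app.py:

# Hypothetical helper mirroring the resp.find("```python") / resp.rfind("```")
# slicing in the commit: take the first ```python fence, cut at the last
# closing fence, and strip surrounding whitespace.
def extract_python_block(resp: str) -> str | None:
    start = resp.find("```python")
    end = resp.rfind("```")
    if start != -1 and end != -1 and end > start:
        return resp[start + len("```python"):end].strip()
    return None

sample = "Here is the app:\n```python\nprint('hello')\n```\nLet me know."
assert extract_python_block(sample) == "print('hello')"

As in the commit, anything outside the fences (explanations, trailing notes) is discarded, and a reply with no parsable block falls through to the "Could not parse code from Gemini" branch.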