wuhp committed on
Commit
3290861
·
verified ·
1 Parent(s): 6dbcd3a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +111 -189
app.py CHANGED
@@ -1,8 +1,10 @@
1
  # app.py
2
 
3
  import os
 
4
  import time
5
  import json
 
6
  import requests
7
 
8
  import gradio as gr
@@ -65,7 +67,7 @@ def upload_file_to_space_action(
65
  )
66
  return f"✅ Uploaded `{path_in_repo}`"
67
  except Exception as e:
68
- return f"Error uploading file: {e}"
69
 
70
  def _fetch_space_logs_level(repo_id: str, level: str, token: str) -> str:
71
  jwt_url = f"{constants.ENDPOINT}/api/spaces/{repo_id}/jwt"
@@ -120,15 +122,10 @@ def configure_gemini(api_key: str | None, model_name: str | None) -> str:
120
  return f"Error configuring Gemini: {e}"
121
 
122
  def call_gemini(prompt: str, api_key: str, model_name: str) -> str:
123
- if not api_key or not model_name:
124
- return "Error: Gemini API key or model not provided."
125
- try:
126
- genai.configure(api_key=api_key)
127
- model = genai.GenerativeModel(model_name)
128
- response = model.generate_content(prompt)
129
- return response.text or "Gemini returned an empty response."
130
- except Exception as e:
131
- return f"Error calling Gemini API with {model_name}: {e}"
132
 
133
 
134
  # --- AI workflow logic ---
@@ -156,106 +153,124 @@ def ai_workflow_chat(
156
  ]:
157
  history.append([message, None])
158
  bot_message = ""
159
- new_repo_id = repo_id_state
160
- new_workflow = workflow_state
161
  updated_preview = preview_html
162
- updated_container = container_logs
163
- updated_build = build_logs
164
 
165
  try:
 
166
  if not hf_profile or not hf_token:
167
  bot_message = "Please log in to Hugging Face first."
168
- new_workflow = "awaiting_login"
169
  elif not gemini_api_key or not gemini_model:
170
  bot_message = "Please enter your API key and select a Gemini model."
171
- new_workflow = "awaiting_api_key"
172
- elif (new_workflow == "idle" or "create" in message.lower()) and not new_repo_id:
 
 
 
 
 
 
 
 
 
 
 
 
 
173
  bot_message = "What should the Space be called? (e.g., `my-awesome-app`)"
174
- new_workflow = "awaiting_repo_name"
175
- elif new_workflow == "awaiting_repo_name":
 
176
  repo_name = message.strip()
177
- if not repo_name:
178
- bot_message = "Please provide a valid Space name."
179
- else:
180
- bot_message = f"Creating Space `{hf_profile.username}/{repo_name}`..."
181
- new_repo_id, iframe_html = create_space_action(repo_name, space_sdk, hf_profile, hf_token)
182
- updated_preview = iframe_html
183
- bot_message += "\n✅ Space created."
184
- new_workflow = "awaiting_app_description"
185
- elif new_workflow in ("awaiting_app_description", "debugging"):
186
- if new_workflow == "awaiting_app_description":
187
- app_desc = message
188
- bot_message = f"Generating code for a `{space_sdk}` app based on: '{app_desc}'..."
189
- prompt = f"""
190
  You are an AI assistant specializing in Hugging Face Spaces using the {space_sdk} SDK.
191
  Generate a full, single-file Python app based on:
192
- '{app_desc}'
193
- Return **only** the code block (```python ...```).
194
  """
195
- else:
196
- debug_instr = message
197
- logs = get_container_logs_action(new_repo_id, hf_profile, hf_token)
198
- bot_message = f"Analyzing logs and applying fixes: '{debug_instr}'..."
199
- prompt = f"""
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
200
  You are debugging a {space_sdk} Space.
201
- Logs:
202
- {logs}
203
- User instructions:
204
- '{debug_instr}'
205
- Generate a fixed, single-file Python app. Return only the ```python``` code block.
206
  """
207
- new_workflow = "generating_code"
208
- resp = call_gemini(prompt, gemini_api_key, gemini_model)
209
- start = resp.find("```python")
210
- end = resp.rfind("```")
211
- if start != -1 and end != -1 and end > start:
212
- code = resp[start + len("```python"):end].strip()
213
- bot_message += "\n✅ Code generated. Uploading..."
214
- new_workflow = "uploading_code"
215
- upload_log = upload_file_to_space_action(code, "app.py", new_repo_id, hf_profile, hf_token)
216
- bot_message += "\n" + upload_log
217
- if "✅ Uploaded" in upload_log:
218
- bot_message += "\nThe Space is now rebuilding. Say 'check logs' to fetch them."
219
- new_workflow = "awaiting_log_check"
220
- updated_preview = f'<iframe src="https://huggingface.co/spaces/{new_repo_id}" width="100%" height="500px"></iframe>'
221
- else:
222
- new_workflow = "idle"
223
- else:
224
- bot_message += f"\n⚠️ Could not parse code from Gemini.\nResponse:\n{resp}"
225
- new_workflow = "awaiting_app_description"
226
- elif new_workflow == "awaiting_log_check" and "check logs" in message.lower():
227
- bot_message = "Fetching logs..."
228
- updated_container = get_container_logs_action(new_repo_id, hf_profile, hf_token)
229
- updated_build = get_build_logs_action(new_repo_id, hf_profile, hf_token)
230
- bot_message += "\n✅ Logs updated. Describe any errors or say 'generate fix'."
231
- new_workflow = "reviewing_logs"
232
- elif new_workflow == "reviewing_logs" and "generate fix" in message.lower():
233
- latest = get_container_logs_action(new_repo_id, hf_profile, hf_token)
234
- if "Error" not in latest and "Exception" not in latest:
235
- bot_message = "No clear error found. What should I fix?"
236
- new_workflow = "reviewing_logs"
237
  else:
238
- bot_message = "Generating a fix based on logs..."
239
- new_workflow = "debugging"
240
- elif "reset" in message.lower():
 
 
 
241
  bot_message = "Workflow reset."
242
- new_repo_id = None
243
  updated_preview = "<p>No Space created yet.</p>"
244
- updated_container = ""
245
  updated_build = ""
246
- new_workflow = "idle"
247
- else:
248
- bot_message = "Command not recognized. Try 'create', 'check logs', 'generate fix', or 'reset'."
 
 
 
249
  except Exception as e:
250
  bot_message = f"Unexpected error: {e}"
251
- new_workflow = "idle"
252
 
253
- if history and history[-1][1] is None:
254
- history[-1][1] = bot_message
255
- else:
256
- history.append([None, bot_message])
257
-
258
- return history, new_repo_id, new_workflow, updated_preview, updated_container, updated_build
259
 
260
 
261
  # --- Build the Gradio UI ---
@@ -264,7 +279,7 @@ with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
264
  hf_profile = gr.State(None)
265
  hf_token = gr.State(None)
266
  gemini_key = gr.State(None)
267
- gemini_model = gr.State("gemini-2.5-pro-preview-03-25")
268
  repo_id = gr.State(None)
269
  workflow = gr.State("idle")
270
  sdk_state = gr.State("gradio")
@@ -275,7 +290,6 @@ with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
275
  gr.Markdown("## Hugging Face Login")
276
  login_status = gr.Markdown("*Not logged in.*")
277
  login_btn = gr.LoginButton(variant="huggingface")
278
-
279
  ai_builder_tab.load(show_profile, outputs=login_status)
280
  login_btn.click(show_profile, outputs=login_status)
281
  login_btn.click(lambda p, t: (p, t), outputs=[hf_profile, hf_token])
@@ -289,14 +303,16 @@ with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
289
  model_selector = gr.Radio(
290
  choices=[
291
  ("Gemini 2.5 Flash Preview 04-17", "gemini-2.5-flash-preview-04-17"),
292
- ("Gemini 2.5 Pro Preview 03-25", "gemini-2.5-pro-preview-03-25")
 
 
 
293
  ],
294
- value="gemini-2.5-pro-preview-03-25",
295
  label="Select model"
296
  )
297
  model_selector.change(lambda m: m, inputs=model_selector, outputs=gemini_model)
298
 
299
- # configure Gemini on key or model change
300
  ai_builder_tab.load(
301
  configure_gemini,
302
  inputs=[gemini_key, gemini_model],
@@ -370,99 +386,5 @@ with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
370
  ]
371
  )
372
 
373
- with gr.Blocks(title="Manual Hugging Face Space Manager") as manual_control_tab:
374
- manual_profile = gr.State(None)
375
- manual_token = gr.State(None)
376
- manual_repo = gr.State(None)
377
-
378
- gr.Markdown("## Manual Sign-In & Space Management")
379
- manual_login_btn = gr.LoginButton(variant="huggingface", size="lg")
380
- manual_status = gr.Markdown("*Not logged in.*")
381
- manual_models = gr.Markdown()
382
-
383
- manual_control_tab.load(show_profile, outputs=manual_status)
384
- manual_login_btn.click(show_profile, outputs=manual_status)
385
- manual_control_tab.load(list_private_models, outputs=manual_models)
386
- manual_login_btn.click(list_private_models, outputs=manual_models)
387
- manual_login_btn.click(lambda p, t: (p, t), outputs=[manual_profile, manual_token])
388
-
389
- manual_repo_name = gr.Textbox(label="New Space name", placeholder="my-space")
390
- manual_sdk_sel = gr.Radio(choices=["gradio","streamlit"], value="gradio", label="Template SDK")
391
- manual_create_btn = gr.Button("Create Space", interactive=False)
392
- manual_create_logs = gr.Textbox(label="Create Logs", lines=3, interactive=False)
393
- manual_preview = gr.HTML("<p>No Space created yet.</p>")
394
-
395
- manual_control_tab.load(
396
- lambda p, t: gr.update(interactive=bool(p and t)),
397
- inputs=[manual_profile, manual_token],
398
- outputs=[manual_create_btn]
399
- )
400
- manual_login_btn.click(
401
- lambda p, t: gr.update(interactive=bool(p and t)),
402
- inputs=[manual_profile, manual_token],
403
- outputs=[manual_create_btn]
404
- )
405
-
406
- manual_create_btn.click(
407
- create_space_action,
408
- inputs=[manual_repo_name, manual_sdk_sel, manual_profile, manual_token],
409
- outputs=[manual_repo, manual_preview]
410
- ).then(lambda _: "", outputs=[manual_create_logs])
411
-
412
- manual_path = gr.Textbox(label="Path in Space", value="app.py")
413
- manual_file = gr.File(label="Select file")
414
- manual_up_btn = gr.Button("Upload File", interactive=False)
415
- manual_up_log = gr.Textbox(label="Upload Logs", lines=2, interactive=False)
416
-
417
- manual_control_tab.load(
418
- lambda rid, p, t: gr.update(interactive=bool(rid and p and t)),
419
- inputs=[manual_repo, manual_profile, manual_token],
420
- outputs=[manual_up_btn]
421
- )
422
- manual_login_btn.click(
423
- lambda rid, p, t: gr.update(interactive=bool(rid and p and t)),
424
- inputs=[manual_repo, manual_profile, manual_token],
425
- outputs=[manual_up_btn]
426
- )
427
-
428
- manual_up_btn.click(
429
- upload_file_to_space_action,
430
- inputs=[manual_file, manual_path, manual_repo, manual_profile, manual_token],
431
- outputs=[manual_up_log]
432
- )
433
-
434
- manual_build_btn = gr.Button("Fetch Build Logs", interactive=False)
435
- manual_container_btn = gr.Button("Fetch Container Logs", interactive=False)
436
- manual_build_txt = gr.Textbox(label="Build Logs", lines=10, interactive=False)
437
- manual_container_txt = gr.Textbox(label="Container Logs", lines=10, interactive=False)
438
-
439
- for btn in (manual_build_btn, manual_container_btn):
440
- manual_control_tab.load(
441
- lambda rid, p, t: gr.update(interactive=bool(rid and p and t)),
442
- inputs=[manual_repo, manual_profile, manual_token],
443
- outputs=[btn]
444
- )
445
- manual_login_btn.click(
446
- lambda rid, p, t: gr.update(interactive=bool(rid and p and t)),
447
- inputs=[manual_repo, manual_profile, manual_token],
448
- outputs=[btn]
449
- )
450
-
451
- manual_build_btn.click(
452
- get_build_logs_action,
453
- inputs=[manual_repo, manual_profile, manual_token],
454
- outputs=[manual_build_txt]
455
- )
456
- manual_container_btn.click(
457
- get_container_logs_action,
458
- inputs=[manual_repo, manual_profile, manual_token],
459
- outputs=[manual_container_txt]
460
- )
461
-
462
- demo = gr.TabbedInterface(
463
- [ai_builder_tab, manual_control_tab],
464
- ["AI App Builder", "Manual Control"]
465
- )
466
-
467
  if __name__ == "__main__":
468
- demo.launch()
 
1
  # app.py
2
 
3
  import os
4
+ import re
5
  import time
6
  import json
7
+ import io
8
  import requests
9
 
10
  import gradio as gr
 
67
  )
68
  return f"✅ Uploaded `{path_in_repo}`"
69
  except Exception as e:
70
+ return f"Error uploading `{path_in_repo}`: {e}"
71
 
72
  def _fetch_space_logs_level(repo_id: str, level: str, token: str) -> str:
73
  jwt_url = f"{constants.ENDPOINT}/api/spaces/{repo_id}/jwt"
 
122
  return f"Error configuring Gemini: {e}"
123
 
124
def call_gemini(prompt: str, api_key: str, model_name: str) -> str:
    """Send *prompt* to the given Gemini model and return its text reply.

    Args:
        prompt: Full prompt text to send to the model.
        api_key: Google AI Studio API key; must be non-empty.
        model_name: Gemini model identifier
            (e.g. "gemini-2.5-flash-preview-04-17").

    Returns:
        The model's text response, or "" when the response carries no text
        (e.g. the reply was blocked or empty).

    Raises:
        ValueError: If api_key or model_name is missing — clearer than the
            opaque error genai would raise downstream. Callers invoke this
            inside a broad try/except and surface the message in the chat UI.
    """
    if not api_key or not model_name:
        raise ValueError("Gemini API key and model name must be provided.")
    genai.configure(api_key=api_key)
    model = genai.GenerativeModel(model_name)
    response = model.generate_content(prompt)
    # Normalize a text-less response to "" so callers can treat the
    # return value uniformly as a string.
    return response.text or ""
 
 
 
 
 
129
 
130
 
131
  # --- AI workflow logic ---
 
153
  ]:
154
  history.append([message, None])
155
  bot_message = ""
156
+ repo_id = repo_id_state
157
+ state = workflow_state
158
  updated_preview = preview_html
159
+ updated_build = build_logs
160
+ updated_run = container_logs
161
 
162
  try:
163
+ # 1) Ensure login & API key
164
  if not hf_profile or not hf_token:
165
  bot_message = "Please log in to Hugging Face first."
166
+ state = "awaiting_login"
167
  elif not gemini_api_key or not gemini_model:
168
  bot_message = "Please enter your API key and select a Gemini model."
169
+ state = "awaiting_api_key"
170
+
171
+ # 2) Auto-detect “generate me ... app called NAME”
172
+ elif state == "idle" and re.search(r'generate (?:me )?(?:a|an) \w+ app called (\w+)', message, re.I):
173
+ m = re.search(r'generate (?:me )?(?:a|an) \w+ app called (\w+)', message, re.I)
174
+ repo_name = m.group(1)
175
+ bot_message = f"Creating Space `{hf_profile.username}/{repo_name}`..."
176
+ repo_id, iframe_html = create_space_action(repo_name, space_sdk, hf_profile, hf_token)
177
+ updated_preview = iframe_html
178
+ bot_message += "\n✅ Space created. Generating and uploading code..."
179
+ state = "generating_all"
180
+ app_desc = message
181
+
182
+ # 3) Manual start: waiting for repo name
183
+ elif (state in ("idle","awaiting_login","awaiting_api_key") or ("create" in message.lower())) and not repo_id:
184
  bot_message = "What should the Space be called? (e.g., `my-awesome-app`)"
185
+ state = "awaiting_repo_name"
186
+
187
+ elif state == "awaiting_repo_name":
188
  repo_name = message.strip()
189
+ bot_message = f"Creating Space `{hf_profile.username}/{repo_name}`..."
190
+ repo_id, iframe_html = create_space_action(repo_name, space_sdk, hf_profile, hf_token)
191
+ updated_preview = iframe_html
192
+ bot_message += "\n✅ Space created."
193
+ state = "generating_all"
194
+ app_desc = None
195
+
196
+ # 4) Generate code + requirements + README + deploy
197
+ if state == "generating_all":
198
+ # generate app.py
199
+ prompt = f"""
 
 
200
  You are an AI assistant specializing in Hugging Face Spaces using the {space_sdk} SDK.
201
  Generate a full, single-file Python app based on:
202
+ '{app_desc or 'a Gradio image-blur test app with upload and slider controls'}'
203
+ Return **only** the python code block for app.py.
204
  """
205
+ code = call_gemini(prompt, gemini_api_key, gemini_model)
206
+ code = code.strip().strip("```python").strip("```")
207
+ # upload app.py
208
+ upload_file_to_space_action(io.StringIO(code), "app.py", repo_id, hf_profile, hf_token)
209
+ # generate requirements.txt
210
+ reqs = "\n".join([
211
+ "gradio",
212
+ "google-generativeai",
213
+ "huggingface_hub",
214
+ "requests"
215
+ ]) + "\n"
216
+ upload_file_to_space_action(io.StringIO(reqs), "requirements.txt", repo_id, hf_profile, hf_token)
217
+ # generate README.md
218
+ readme = f"# {repo_id.split('/')[-1]}\n\n" \
219
+ "This Hugging Face Space was generated by an AI.\n\n" \
220
+ "## Usage\n\n" \
221
+ "Upload an image and use the slider to control blur intensity.\n"
222
+ upload_file_to_space_action(io.StringIO(readme), "README.md", repo_id, hf_profile, hf_token)
223
+
224
+ bot_message += "\n✅ All files uploaded. Building and checking logs..."
225
+ state = "checking_logs"
226
+
227
+ # 5) Fetch logs and auto-debug loop
228
+ if state == "checking_logs":
229
+ updated_build = get_build_logs_action(repo_id, hf_profile, hf_token)
230
+ updated_run = get_container_logs_action(repo_id, hf_profile, hf_token)
231
+ # if errors detected, auto-debug up to 3 attempts
232
+ attempts = 0
233
+ while attempts < 3 and ( "Error" in updated_run or "Exception" in updated_run ):
234
+ attempts += 1
235
+ bot_message += f"\n🔧 Debug attempt #{attempts}"
236
+ debug_prompt = f"""
237
  You are debugging a {space_sdk} Space.
238
+ Here are the container logs:
239
+ {updated_run}
240
+
241
+ Generate a fixed version of app.py only. Return the python code block.
 
242
  """
243
+ fix_code = call_gemini(debug_prompt, gemini_api_key, gemini_model)
244
+ fix_code = fix_code.strip().strip("```python").strip("```")
245
+ upload_file_to_space_action(io.StringIO(fix_code), "app.py", repo_id, hf_profile, hf_token)
246
+ time.sleep(5) # wait for rebuild
247
+ updated_run = get_container_logs_action(repo_id, hf_profile, hf_token)
248
+ if "Error" not in updated_run and "Exception" not in updated_run:
249
+ bot_message += "\n✅ Application deployed successfully!"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
250
  else:
251
+ bot_message += "\n❌ Could not fully debug after 3 attempts. Please check logs."
252
+
253
+ state = "idle"
254
+
255
+ # 6) Reset workflow
256
+ if "reset" in message.lower():
257
  bot_message = "Workflow reset."
258
+ repo_id = None
259
  updated_preview = "<p>No Space created yet.</p>"
260
+ updated_run = ""
261
  updated_build = ""
262
+ state = "idle"
263
+
264
+ # Catch-all
265
+ if not bot_message:
266
+ bot_message = "Command not recognized. Try 'generate me a gradio app called myapp', or 'reset'."
267
+
268
  except Exception as e:
269
  bot_message = f"Unexpected error: {e}"
270
+ state = "idle"
271
 
272
+ history[-1][1] = bot_message
273
+ return history, repo_id, state, updated_preview, updated_run, updated_build
 
 
 
 
274
 
275
 
276
  # --- Build the Gradio UI ---
 
279
  hf_profile = gr.State(None)
280
  hf_token = gr.State(None)
281
  gemini_key = gr.State(None)
282
+ gemini_model = gr.State("gemini-2.5-flash-preview-04-17")
283
  repo_id = gr.State(None)
284
  workflow = gr.State("idle")
285
  sdk_state = gr.State("gradio")
 
290
  gr.Markdown("## Hugging Face Login")
291
  login_status = gr.Markdown("*Not logged in.*")
292
  login_btn = gr.LoginButton(variant="huggingface")
 
293
  ai_builder_tab.load(show_profile, outputs=login_status)
294
  login_btn.click(show_profile, outputs=login_status)
295
  login_btn.click(lambda p, t: (p, t), outputs=[hf_profile, hf_token])
 
303
  model_selector = gr.Radio(
304
  choices=[
305
  ("Gemini 2.5 Flash Preview 04-17", "gemini-2.5-flash-preview-04-17"),
306
+ ("Gemini 2.5 Pro Preview 03-25", "gemini-2.5-pro-preview-03-25"),
307
+ ("Gemini 2.0 Flash", "gemini-2.0-flash"),
308
+ ("Gemini 2.0 Flash‑Lite", "gemini-2.0-flash-lite"),
309
+ ("Gemini 1.5 Flash", "gemini-1.5-flash"),
310
  ],
311
+ value="gemini-2.5-flash-preview-04-17",
312
  label="Select model"
313
  )
314
  model_selector.change(lambda m: m, inputs=model_selector, outputs=gemini_model)
315
 
 
316
  ai_builder_tab.load(
317
  configure_gemini,
318
  inputs=[gemini_key, gemini_model],
 
386
  ]
387
  )
388
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
389
# Script entry point: launch the Gradio Blocks app when run directly
# (not when imported as a module).
if __name__ == "__main__":
    ai_builder_tab.launch()