wuhp commited on
Commit
2528f91
·
verified ·
1 Parent(s): 74f7159

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +565 -321
app.py CHANGED
@@ -14,6 +14,7 @@ from huggingface_hub.utils import build_hf_headers, get_session, hf_raise_for_st
14
  # --- Helper functions for Hugging Face integration ---
15
 
16
  def show_profile(profile: gr.OAuthProfile | None) -> str:
 
17
  if profile is None:
18
  return "*Not logged in.*"
19
  return f"✅ Logged in as **{profile.username}**"
@@ -22,6 +23,7 @@ def list_private_models(
22
  profile: gr.OAuthProfile | None,
23
  oauth_token: gr.OAuthToken | None
24
  ) -> str:
 
25
  if profile is None or oauth_token is None:
26
  return "Please log in to see your models."
27
  try:
@@ -31,15 +33,19 @@ def list_private_models(
31
  ]
32
  return "No models found." if not models else "Models:\n\n" + "\n - ".join(models)
33
  except Exception as e:
 
34
  return f"Error listing models: {e}"
35
 
36
  def create_space_action(repo_name: str, sdk: str, profile: gr.OAuthProfile, token: gr.OAuthToken):
 
 
 
37
  repo_id = f"{profile.username}/{repo_name}"
38
  try:
39
  create_repo(
40
  repo_id=repo_id,
41
  token=token.token,
42
- exist_ok=True,
43
  repo_type="space",
44
  space_sdk=sdk
45
  )
@@ -50,12 +56,13 @@ def create_space_action(repo_name: str, sdk: str, profile: gr.OAuthProfile, toke
50
  raise RuntimeError(f"Failed to create Space {repo_id}: {e}")
51
 
52
  def upload_file_to_space_action(
53
- file_obj,
54
  path_in_repo: str,
55
  repo_id: str,
56
  profile: gr.OAuthProfile,
57
  token: gr.OAuthToken
58
  ) -> None:
 
59
  if not (profile and token and repo_id):
60
  raise ValueError("Hugging Face profile, token, or repo_id is missing.")
61
  try:
@@ -70,22 +77,24 @@ def upload_file_to_space_action(
70
  raise RuntimeError(f"Failed to upload `{path_in_repo}` to {repo_id}: {e}")
71
 
72
  def _fetch_space_logs_level(repo_id: str, level: str, token: str) -> str:
 
73
  if not repo_id or not token:
74
  return f"Cannot fetch {level} logs: repo_id or token missing."
75
  jwt_url = f"{constants.ENDPOINT}/api/spaces/{repo_id}/jwt"
76
  try:
77
  r = get_session().get(jwt_url, headers=build_hf_headers(token=token))
78
- hf_raise_for_status(r)
79
  jwt = r.json()["token"]
80
  logs_url = f"https://api.hf.space/v1/{repo_id}/logs/{level}"
81
  lines, count = [], 0
 
82
  with get_session().get(logs_url, headers=build_hf_headers(token=jwt), stream=True, timeout=30) as resp:
83
  hf_raise_for_status(resp)
84
  for raw in resp.iter_lines():
85
- if count >= 200:
86
  lines.append("... truncated ...")
87
  break
88
- if not raw.startswith(b"data: "):
89
  continue
90
  payload = raw[len(b"data: "):]
91
  try:
@@ -96,21 +105,27 @@ def _fetch_space_logs_level(repo_id: str, level: str, token: str) -> str:
96
  lines.append(f"[{ts}] {txt}")
97
  count += 1
98
  except json.JSONDecodeError:
 
99
  continue
100
  return "\n".join(lines) if lines else f"No {level} logs found."
101
  except Exception as e:
 
102
  return f"Error fetching {level} logs: {e}"
103
 
104
 
105
  def get_build_logs_action(repo_id, profile, token):
 
106
  if not (repo_id and profile and token):
107
  return "⚠️ Cannot fetch build logs: log in and create a Space first."
 
108
  time.sleep(5)
109
  return _fetch_space_logs_level(repo_id, "build", token.token)
110
 
111
  def get_container_logs_action(repo_id, profile, token):
 
112
  if not (repo_id and profile and token):
113
  return "⚠️ Cannot fetch container logs: log in and create a Space first."
 
114
  time.sleep(10)
115
  return _fetch_space_logs_level(repo_id, "run", token.token)
116
 
@@ -118,32 +133,38 @@ def get_container_logs_action(repo_id, profile, token):
118
  # --- Google Gemini integration with model selection ---
119
 
120
  def configure_gemini(api_key: str | None, model_name: str | None) -> str:
 
121
  if not api_key:
122
  return "Gemini API key is not set."
123
  if not model_name:
124
  return "Please select a Gemini model."
125
  try:
126
  genai.configure(api_key=api_key)
 
 
127
  genai.GenerativeModel(model_name).generate_content("ping", stream=False)
128
  return f"Gemini configured successfully with **{model_name}**."
129
  except Exception as e:
130
  return f"Error configuring Gemini: {e}"
131
 
132
  def call_gemini(prompt: str, api_key: str, model_name: str) -> str:
 
133
  if not api_key or not model_name:
134
  raise ValueError("Gemini API key or model not set.")
135
  try:
136
  genai.configure(api_key=api_key)
137
  model = genai.GenerativeModel(model_name)
 
138
  response = model.generate_content(prompt, stream=False)
139
- return response.text or ""
140
  except Exception as e:
 
141
  raise RuntimeError(f"Gemini API call failed: {e}")
142
 
143
 
144
  # --- AI workflow logic (State Machine) ---
145
 
146
- # Define States
147
  STATE_IDLE = "idle"
148
  STATE_AWAITING_REPO_NAME = "awaiting_repo_name"
149
  STATE_CREATING_SPACE = "creating_space"
@@ -159,13 +180,14 @@ STATE_DEBUGGING_CODE = "debugging_code"
159
  STATE_UPLOADING_FIXED_APP_PY = "uploading_fixed_app_py"
160
  STATE_COMPLETE = "complete"
161
 
162
- MAX_DEBUG_ATTEMPTS = 3
163
 
164
  def add_bot_message(history: list[dict], bot_message: str) -> list[dict]:
165
- """Helper to add a new assistant message to the history."""
166
  history.append({"role": "assistant", "content": bot_message})
167
  return history
168
 
 
169
  def ai_workflow_chat(
170
  message: str,
171
  history: list[dict],
@@ -176,28 +198,33 @@ def ai_workflow_chat(
176
  repo_id_state: str | None,
177
  workflow_state: str,
178
  space_sdk: str,
179
- preview_html: str,
180
- container_logs: str,
181
- build_logs: str,
182
  debug_attempts_state: int,
183
  app_description_state: str | None,
184
  repo_name_state: str | None,
185
  generated_code_state: str | None,
 
186
  *args,
187
  **kwargs
188
  ) -> tuple[
189
- list[dict],
190
- str | None,
191
- str,
192
- str,
193
- str,
194
- str,
195
- int,
196
- str | None,
197
- str | None,
198
- str | None,
199
  ]:
200
- # Unpack state variables
 
 
 
 
201
  repo_id = repo_id_state
202
  state = workflow_state
203
  attempts = debug_attempts_state
@@ -205,305 +232,462 @@ def ai_workflow_chat(
205
  repo_name = repo_name_state
206
  generated_code = generated_code_state
207
 
208
-
209
  updated_preview = preview_html
210
  updated_build = build_logs
211
  updated_run = container_logs
212
 
213
- # Add user message to history in the new format
214
  user_message_entry = {"role": "user", "content": message}
 
215
  if hf_profile and hf_profile.username:
216
  user_message_entry["name"] = hf_profile.username
217
  history.append(user_message_entry)
218
 
219
- # Yield immediately to show user message
220
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
221
-
 
222
 
223
  try:
224
- # --- State Machine Logic ---
225
 
226
  if state == STATE_IDLE:
227
- # Check prerequisites within the state logic as well
228
  if not (hf_profile and hf_token):
229
  history = add_bot_message(history, "Workflow paused: Please log in to Hugging Face first.")
230
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
231
- return
 
 
 
232
  if not (gemini_api_key and gemini_model):
233
  history = add_bot_message(history, "Workflow paused: Please enter your API key and select a Gemini model.")
234
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
235
- return
 
 
236
 
237
- # Look for commands
238
  reset_match = "reset" in message.lower()
239
- generate_match = re.search(r'generate (?:me )?(?:a|an) \w+ app called (\w+)', message, re.I)
 
 
240
  create_match = re.search(r'create (?:a|an)? space called (\w+)', message, re.I)
241
 
242
  if reset_match:
 
243
  history = add_bot_message(history, "Workflow reset.")
244
- yield history, None, STATE_IDLE, "<p>No Space created yet.</p>", "", "", 0, None, None, None
245
- return
 
 
246
 
247
  elif generate_match:
248
- new_repo_name = generate_match.group(1)
249
- new_app_desc = message
250
- history = add_bot_message(history, f"Acknowledged: '{message}'. Starting workflow to create Space `{hf_profile.username}/{new_repo_name}`.")
 
 
251
  state = STATE_CREATING_SPACE
252
  repo_name = new_repo_name
253
  app_desc = new_app_desc
254
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
255
-
 
 
256
 
257
  elif create_match:
258
- new_repo_name = create_match.group(1)
 
259
  history = add_bot_message(history, f"Acknowledged: '{message}'. Starting workflow to create Space `{hf_profile.username}/{new_repo_name}`.")
 
260
  state = STATE_CREATING_SPACE
261
  repo_name = new_repo_name
262
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
 
 
 
263
 
264
  elif "create" in message.lower() and not repo_id:
 
265
  history = add_bot_message(history, "Okay, what should the Space be called? (e.g., `my-awesome-app`)")
266
- state = STATE_AWAITING_REPO_NAME
267
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
 
 
 
268
 
269
  else:
 
270
  history = add_bot_message(history, "Command not recognized. Try 'generate me a gradio app called myapp', or 'reset'.")
271
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
272
- return
 
 
273
 
274
 
275
- if state == STATE_AWAITING_REPO_NAME:
276
- new_repo_name = message.strip()
277
- if not new_repo_name or re.search(r'[^a-zA-Z0-9_-]', new_repo_name):
 
 
278
  history = add_bot_message(history, "Invalid name. Please provide a single word/slug for the Space name (letters, numbers, underscores, hyphens only).")
279
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
280
- else:
281
- history = add_bot_message(history, f"Using Space name `{new_repo_name}`. Creating Space `{hf_profile.username}/{new_repo_name}`...")
282
- state = STATE_CREATING_SPACE
283
- repo_name = new_repo_name
284
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
285
 
 
 
 
 
 
 
 
 
 
 
 
 
286
 
287
  elif state == STATE_CREATING_SPACE:
288
- if not repo_name:
289
- history = add_bot_message(history, "Internal error: Repo name missing for creation. Resetting.")
290
- yield history, None, STATE_IDLE, "<p>Error creating space.</p>", "", "", 0, None, None, None
291
- return
292
-
293
- try:
294
- new_repo_id, iframe_html = create_space_action(repo_name, space_sdk, hf_profile, hf_token)
295
- updated_preview = iframe_html
296
- repo_id = new_repo_id
297
- history = add_bot_message(history, f"✅ Space `{repo_id}` created. Click 'Send' to generate and upload code.")
298
- state = STATE_GENERATING_CODE
299
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
300
 
301
- except Exception as e:
302
- history = add_bot_message(history, f"❌ Error creating space: {e}. Click 'reset'.")
303
- yield history, None, STATE_IDLE, "<p>Error creating space.</p>", "", "", 0, None, None, None
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
304
 
305
 
306
  elif state == STATE_GENERATING_CODE:
307
- prompt_desc = app_desc if app_desc else 'a Gradio image-blur test app with upload and slider controls'
308
- prompt = f"""
 
309
  You are an AI assistant specializing in Hugging Face Spaces using the {space_sdk} SDK.
310
  Generate a full, single-file Python app based on:
311
  '{prompt_desc}'
312
  Return **only** the python code block for app.py. Do not include any extra text, explanations, or markdown outside the code block.
313
  """
314
- try:
315
- history = add_bot_message(history, "🧠 Generating `app.py` code with Gemini...")
316
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
317
-
318
- code = call_gemini(prompt, gemini_api_key, gemini_model)
319
- code = code.strip()
320
- if code.startswith("```python"):
321
- code = code[len("```python"):].strip()
322
- if code.endswith("```"):
323
- code = code[:-len("```")].strip()
324
-
325
- if not code:
326
- raise ValueError("Gemini returned empty code.")
327
-
328
- history = add_bot_message(history, "✅ `app.py` code generated. Click 'Send' to upload.")
329
- state = STATE_UPLOADING_APP_PY
330
- generated_code = code
331
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
 
 
 
 
 
 
 
332
 
333
- except Exception as e:
334
- history = add_bot_message(history, f"❌ Error generating code: {e}. Click 'reset'.")
335
- yield history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0, None, None, None
 
 
 
336
 
337
 
338
  elif state == STATE_UPLOADING_APP_PY:
339
- if not generated_code:
340
- history = add_bot_message(history, "Internal error: No code to upload. Resetting.")
341
- yield history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0, None, None, None
342
- return
343
-
344
- history = add_bot_message(history, "☁️ Uploading `app.py`...")
345
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
346
 
347
- try:
348
- upload_file_to_space_action(io.StringIO(generated_code), "app.py", repo_id, hf_profile, hf_token)
349
- history = add_bot_message(history, "✅ Uploaded `app.py`. Click 'Send' to generate requirements.")
350
- state = STATE_GENERATING_REQUIREMENTS
351
- generated_code = None
352
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
353
-
354
- except Exception as e:
355
- history = add_bot_message(history, f"❌ Error uploading app.py: {e}. Click 'reset'.")
356
- yield history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0, None, None, None
 
 
 
 
 
 
 
 
 
 
 
 
 
357
 
358
 
359
  elif state == STATE_GENERATING_REQUIREMENTS:
360
- history = add_bot_message(history, "📄 Generating `requirements.txt`...")
361
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
362
-
363
- reqs_list = ["gradio"] if space_sdk == "gradio" else ["streamlit"]
364
- if "google.generativeai" in str(app_desc).lower() or "gemini" in str(app_desc).lower() or gemini_api_key:
365
- reqs_list.append("google-generativeai")
366
- if "requests" in str(app_desc).lower():
367
- reqs_list.append("requests")
368
- reqs_list.append("huggingface_hub")
369
- if "image" in str(app_desc).lower() or "upload" in str(app_desc).lower() or "blur" in str(app_desc).lower() or "vision" in str(app_desc).lower():
370
- reqs_list.append("Pillow")
371
- if "numpy" in str(app_desc).lower(): reqs_list.append("numpy")
372
- if "pandas" in str(app_desc).lower(): reqs_list.append("pandas")
373
- if "scikit-image" in str(app_desc).lower() or "skimage" in str(app_desc).lower() or "cv2" in str(app_desc).lower() or "opencv-python" in str(app_desc).lower():
374
- reqs_list.append("scikit-image")
375
- reqs_list.append("opencv-python")
376
-
377
- reqs_list = list(dict.fromkeys(reqs_list))
378
-
379
- reqs_content = "\n".join(reqs_list) + "\n"
380
-
381
- history = add_bot_message(history, "✅ `requirements.txt` generated. Click 'Send' to upload.")
382
- state = STATE_UPLOADING_REQUIREMENTS
383
- generated_code = reqs_content
384
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
 
 
 
 
 
 
 
 
 
 
 
 
385
 
386
 
387
  elif state == STATE_UPLOADING_REQUIREMENTS:
 
388
  reqs_content_to_upload = generated_code
389
  if not reqs_content_to_upload:
390
  history = add_bot_message(history, "Internal error: No requirements content to upload. Resetting.")
391
- yield history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0, None, None, None
392
- return
393
-
394
- history = add_bot_message(history, "☁️ Uploading `requirements.txt`...")
395
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
396
-
397
- try:
398
- upload_file_to_space_action(io.StringIO(reqs_content_to_upload), "requirements.txt", repo_id, hf_profile, hf_token)
399
- history = add_bot_message(history, "✅ Uploaded `requirements.txt`. Click 'Send' to generate README.")
400
- state = STATE_GENERATING_README
401
- generated_code = None
402
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
403
 
404
-
405
- except Exception as e:
406
- history = add_bot_message(history, f"❌ Error uploading requirements.txt: {e}. Click 'reset'.")
407
- yield history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0, None, None, None
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
408
 
409
  elif state == STATE_GENERATING_README:
410
  history = add_bot_message(history, "📝 Generating `README.md`...")
411
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
 
 
412
 
 
413
  readme_title = repo_name if repo_name else "My Awesome Space"
414
  readme_description = app_desc if app_desc else f"This Hugging Face Space hosts an AI-generated {space_sdk} application."
415
 
416
- readme_content = f"# {readme_title}\n\n{readme_description}\n\n" \
417
- "This Space was automatically generated by an AI workflow.\n\n" \
418
- f"Built with the {space_sdk} SDK.\n"
 
 
 
 
 
 
 
 
 
 
 
 
 
419
 
420
  history = add_bot_message(history, "✅ `README.md` generated. Click 'Send' to upload.")
421
- state = STATE_UPLOADING_README
422
- generated_code = readme_content
423
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
 
 
 
424
 
425
 
426
  elif state == STATE_UPLOADING_README:
 
427
  readme_content_to_upload = generated_code
428
  if not readme_content_to_upload:
429
  history = add_bot_message(history, "Internal error: No README content to upload. Resetting.")
430
- yield history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0, None, None, None
431
- return
432
-
433
- history = add_bot_message(history, "☁️ Uploading `README.md`...")
434
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
435
-
436
- try:
437
- upload_file_to_space_action(io.StringIO(readme_content_to_upload), "README.md", repo_id, hf_profile, hf_token)
438
- history = add_bot_message(history, "✅ Uploaded `README.md`. All files uploaded. Space is now building. Click 'Send' to check build logs.")
439
- state = STATE_CHECKING_LOGS_BUILD
440
- generated_code = None
441
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
442
-
443
-
444
- except Exception as e:
445
- history = add_bot_message(history, f"❌ Error uploading README.md: {e}. Click 'reset'.")
446
- yield history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0, None, None, None
447
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
448
 
449
  elif state == STATE_CHECKING_LOGS_BUILD:
450
  history = add_bot_message(history, "🔍 Fetching build logs...")
451
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
 
 
452
 
 
453
  build_logs_text = get_build_logs_action(repo_id, hf_profile, hf_token)
454
- updated_build = build_logs_text
455
 
456
- if "Error" in updated_build or "Exception" in updated_build or "Build failed" in updated_build:
457
- history = add_bot_message(history, "⚠️ Build logs indicate potential issues. Please inspect above. Click 'Send' to check container logs (app might still start).")
458
- state = STATE_CHECKING_LOGS_RUN
459
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
 
 
 
 
460
 
461
  else:
462
  history = add_bot_message(history, "✅ Build logs fetched. Click 'Send' to check container logs.")
463
- state = STATE_CHECKING_LOGS_RUN
464
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
 
 
 
465
 
466
 
467
  elif state == STATE_CHECKING_LOGS_RUN:
468
  history = add_bot_message(history, "🔍 Fetching container logs...")
469
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
 
 
470
 
 
471
  container_logs_text = get_container_logs_action(repo_id, hf_profile, hf_token)
472
- updated_run = container_logs_text
473
 
474
- if ("Error" in updated_run or "Exception" in updated_run) and attempts < MAX_DEBUG_ATTEMPTS:
475
- attempts += 1
 
476
  history = add_bot_message(history, f"❌ Errors detected in container logs. Attempting debug fix #{attempts}/{MAX_DEBUG_ATTEMPTS}. Click 'Send' to proceed.")
477
- state = STATE_DEBUGGING_CODE
478
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
479
-
480
- elif ("Error" in updated_run or "Exception" in updated_run) and attempts >= MAX_DEBUG_ATTEMPTS:
 
 
 
 
481
  history = add_bot_message(history, f"❌ Errors detected in container logs. Max debug attempts ({MAX_DEBUG_ATTEMPTS}) reached. Please inspect logs manually or click 'reset'.")
482
- state = STATE_COMPLETE
483
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
 
 
 
484
 
485
  else:
 
486
  history = add_bot_message(history, "✅ App appears to be running successfully! Check the iframe above. Click 'reset' to start a new project.")
487
- state = STATE_COMPLETE
488
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
 
 
 
489
 
490
 
491
  elif state == STATE_DEBUGGING_CODE:
492
  history = add_bot_message(history, f"🧠 Calling Gemini to generate fix based on logs...")
493
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
 
 
494
 
 
495
  debug_prompt = f"""
496
  You are debugging a {space_sdk} Space. The goal is to fix the code in `app.py` based on the container logs provided.
497
 
498
  Here are the container logs:
 
 
499
  {updated_run}
500
-
501
  Generate the *complete, fixed* content for `app.py` based on these logs.
502
  Return **only** the python code block for app.py. Do not include any extra text, explanations, or markdown outside the code block.
503
  """
504
  try:
 
505
  fix_code = call_gemini(debug_prompt, gemini_api_key, gemini_model)
506
  fix_code = fix_code.strip()
 
507
  if fix_code.startswith("```python"):
508
  fix_code = fix_code[len("```python"):].strip()
509
  if fix_code.endswith("```"):
@@ -513,94 +697,125 @@ Return **only** the python code block for app.py. Do not include any extra text,
513
  raise ValueError("Gemini returned empty fix code.")
514
 
515
  history = add_bot_message(history, "✅ Fix code generated. Click 'Send' to upload.")
516
- state = STATE_UPLOADING_FIXED_APP_PY
517
- generated_code = fix_code
518
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
519
-
 
 
520
 
521
  except Exception as e:
522
  history = add_bot_message(history, f"❌ Error generating debug code: {e}. Click 'reset'.")
523
- yield history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0, None, None, None
 
 
 
524
 
525
  elif state == STATE_UPLOADING_FIXED_APP_PY:
 
526
  fixed_code_to_upload = generated_code
527
  if not fixed_code_to_upload:
528
  history = add_bot_message(history, "Internal error: No fixed code available to upload. Resetting.")
529
- yield history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0, None, None, None
530
- return
531
-
532
- history = add_bot_message(history, "☁️ Uploading fixed `app.py`...")
533
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
534
-
535
- try:
536
- upload_file_to_space_action(io.StringIO(fixed_code_to_upload), "app.py", repo_id, hf_profile, hf_token)
537
- history = add_bot_message(history, "✅ Fixed `app.py` uploaded. Space will rebuild. Click 'Send' to check logs again.")
538
- state = STATE_CHECKING_LOGS_RUN
539
- generated_code = None
540
- yield history, repo_id, state, updated_preview, updated_run, updated_build, attempts, app_desc, repo_name, generated_code
541
 
542
-
543
- except Exception as e:
544
- history = add_bot_message(history, f"❌ Error uploading fixed app.py: {e}. Click 'reset'.")
545
- yield history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0, None, None, None
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
546
 
547
  elif state == STATE_COMPLETE:
548
- pass # No further action needed in this state
 
 
 
 
549
 
550
 
551
  except Exception as e:
 
552
  error_message = f"Workflow step failed unexpectedly ({state}): {e}. Click 'Send' to re-attempt this step or 'reset'."
553
  history = add_bot_message(history, error_message)
554
- print(f"Critical Error in state {state}: {e}")
555
- yield history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0, None, None, None
556
- # Removed the 'return' here
 
 
557
 
558
 
559
  # --- Build the Gradio UI ---
560
 
561
  with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
562
- # State variables
563
  hf_profile = gr.State(None)
564
  hf_token = gr.State(None)
565
  gemini_key = gr.State(None)
566
- gemini_model = gr.State("gemini-1.5-flash")
567
- repo_id = gr.State(None)
568
- workflow = gr.State(STATE_IDLE)
569
- sdk_state = gr.State("gradio")
570
- debug_attempts = gr.State(0)
571
- app_description = gr.State(None)
572
- repo_name_state = gr.State(None)
573
- generated_code_state = gr.State(None)
574
 
575
  with gr.Row():
576
- # Sidebar
577
  with gr.Column(scale=1, min_width=300):
578
  gr.Markdown("## Hugging Face Login")
579
  login_status = gr.Markdown("*Not logged in.*")
 
580
  login_btn = gr.LoginButton(variant="huggingface")
581
 
582
- # Initial load to check login status
583
  ai_builder_tab.load(show_profile, outputs=login_status)
584
- # Update status on login click
585
  login_btn.click(show_profile, outputs=login_status)
586
 
587
  gr.Markdown("## Google AI Studio API Key")
588
- gemini_input = gr.Textbox(label="API Key", type="password", interactive=True)
589
- gemini_status = gr.Markdown("")
 
 
 
 
 
 
590
 
591
  gr.Markdown("## Gemini Model")
 
592
  model_selector = gr.Radio(
593
  choices=[
594
  ("Gemini 1.5 Flash", "gemini-1.5-flash"),
595
  ("Gemini 1.5 Pro", "gemini-1.5-pro"),
596
  ("Gemini 1.0 Pro", "gemini-1.0-pro"),
597
  ],
598
- value="gemini-1.5-flash",
599
  label="Select model",
600
  interactive=True
601
  )
602
 
603
- # Configure Gemini status on load
604
  ai_builder_tab.load(
605
  configure_gemini,
606
  inputs=[gemini_key, gemini_model],
@@ -608,145 +823,165 @@ with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
608
  )
609
 
610
  gr.Markdown("## Space SDK")
 
611
  sdk_selector = gr.Radio(choices=["gradio","streamlit"], value="gradio", label="Template SDK", interactive=True)
 
612
  sdk_selector.change(lambda s: s, inputs=sdk_selector, outputs=sdk_state)
613
 
614
  gr.Markdown("## Workflow Status")
 
615
  status_text = gr.Textbox(label="Current State", value=STATE_IDLE, interactive=False)
616
  repo_id_text = gr.Textbox(label="Current Space ID", value="None", interactive=False)
617
 
618
- # --- Debugging Indicator ---
619
- prereq_status = gr.Markdown("Checking...")
 
620
 
621
 
622
- # Main content
623
  with gr.Column(scale=3):
 
624
  chatbot = gr.Chatbot(type='messages', label="AI Workflow Chat")
 
625
  user_input = gr.Textbox(placeholder="Type your message…", interactive=True)
626
- send_btn = gr.Button("Send", interactive=False)
627
-
628
- # Logic to enable send button only when logged in and API key is set
629
- def update_send_button_state(profile: gr.OAuthProfile | None, token: gr.OAuthToken | None, key: str | None, model: str | None, *args, **kwargs):
630
- """
631
- Gradio sometimes passes extra positional/keyword args (old_value, event, etc.),
632
- so we absorb them with *args/**kwargs.
633
- Returns Gradio.update for the button's interactive state and a status string.
634
- """
 
 
 
 
 
 
635
  is_logged_in = profile is not None and token is not None
636
- is_gemini_ready = key is not None and model is not None
637
-
638
- # Print states for debugging
639
- print(f"update_send_button_state called:")
640
- print(f" Profile: {profile.username if profile else None}")
641
- print(f" Token: {'Set' if token else 'None'}")
642
- print(f" API Key: {'Set' if key else 'None'}")
643
- print(f" Model: {model}")
644
- print(f" Logged in: {is_logged_in}, Gemini Ready: {is_gemini_ready}")
645
-
646
- status_str = ""
647
- if is_logged_in and is_gemini_ready:
 
648
  status_str = "✅ Ready to send commands."
649
  else:
650
- status_parts = []
651
- if not is_logged_in:
652
- status_parts.append("⚠️ Not logged in to Hugging Face.")
653
- if not key:
654
- status_parts.append("⚠️ Gemini API key not set.")
655
- if not model:
656
- status_parts.append("⚠️ Gemini model not selected.")
657
  status_str = " ".join(status_parts)
658
- if not status_str:
659
- status_str = " Ready to send commands."
660
 
661
 
662
- return gr.update(interactive=is_logged_in and is_gemini_ready), status_str
 
663
 
664
- # --- Implement Chained Events ---
 
665
 
666
- # 1. Login Button: Update profile/token state, THEN update send button state
 
667
  login_btn.click(
668
- lambda x: (x[0], x[1]), # Extract profile and token from LoginButton output
 
669
  inputs=[login_btn],
670
- outputs=[hf_profile, hf_token]
671
  ).then( # Chain the next action after state is updated
672
  update_send_button_state,
673
  inputs=[hf_profile, hf_token, gemini_key, gemini_model],
674
- outputs=[send_btn, prereq_status]
675
  )
676
 
677
-
678
- # 2. Gemini Key Input: Update key state, THEN configure Gemini, THEN update send button state
 
679
  gemini_input.change(
680
- lambda k: k, # Update gemini_key state
681
  inputs=[gemini_input],
682
- outputs=[gemini_key]
683
- ).then( # Chain configure_gemini after key is updated
684
  configure_gemini,
685
  inputs=[gemini_key, gemini_model],
686
- outputs=[gemini_status]
687
  ).then( # Chain update_send_button_state after config status is updated
688
  update_send_button_state,
689
  inputs=[hf_profile, hf_token, gemini_key, gemini_model],
690
- outputs=[send_btn, prereq_status]
691
  )
692
 
693
-
694
- # 3. Gemini Model Selector: Update model state, THEN configure Gemini, THEN update send button state
 
695
  model_selector.change(
696
- lambda m: m, # Update gemini_model state
697
  inputs=[model_selector],
698
- outputs=[gemini_model]
699
- ).then( # Chain configure_gemini after model is updated
700
  configure_gemini,
701
  inputs=[gemini_key, gemini_model],
702
- outputs=[gemini_status]
703
  ).then( # Chain update_send_button_state after config status is updated
704
  update_send_button_state,
705
  inputs=[hf_profile, hf_token, gemini_key, gemini_model],
706
- outputs=[send_btn, prereq_status]
707
  )
708
 
709
- # 4. Initial Load: Update send button state based on initial (or cached) state
 
710
  ai_builder_tab.load(
711
  update_send_button_state,
712
  inputs=[hf_profile, hf_token, gemini_key, gemini_model],
713
- outputs=[send_btn, prereq_status]
714
  )
715
 
716
-
717
- iframe = gr.HTML("<p>No Space created yet.</p>")
718
- build_txt = gr.Textbox(label="Build Logs", lines=10, interactive=False, value="")
719
- run_txt = gr.Textbox(label="Container Logs", lines=10, interactive=False, value="")
 
720
 
721
  # The main event handler for the Send button
 
722
  send_btn.click(
723
- ai_workflow_chat,
 
724
  inputs=[
725
- user_input, chatbot,
726
- hf_profile, hf_token,
727
- gemini_key, gemini_model,
728
- repo_id, workflow, sdk_state,
729
- iframe, run_txt, build_txt,
730
- debug_attempts, app_description, repo_name_state, generated_code_state
731
  ],
 
732
  outputs=[
733
- chatbot,
734
- repo_id, workflow,
735
- iframe, run_txt, build_txt,
736
- debug_attempts, app_description, repo_name_state, generated_code_state
737
  ]
738
- ).success(
 
739
  lambda: gr.update(value=""),
740
  inputs=None,
741
- outputs=user_input
742
  )
743
 
744
- # Link state variables to UI status displays (reactive updates)
 
745
  workflow.change(lambda s: s, inputs=workflow, outputs=status_text)
 
746
  repo_id.change(lambda r: r if r else "None", inputs=repo_id, outputs=repo_id_text)
747
 
748
 
749
- # Add an initial message to the chatbot on load
750
  def greet():
751
  return [{"role": "assistant", "content": "Welcome! Please log in to Hugging Face and provide your Google AI Studio API key to start building Spaces. Once ready, type 'generate me a gradio app called myapp' or 'create' to begin."}]
752
 
@@ -754,12 +989,21 @@ with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
754
 
755
 
756
  if __name__ == "__main__":
757
- # Optional: Configure retries for huggingface_hub requests
758
  # from requests.adapters import HTTPAdapter
759
  # from urllib3.util.retry import Retry
760
- # retry_strategy = Retry(total=5, backoff_factor=1, status_forcelist=[429, 500, 502, 503, 504])
761
  # adapter = HTTPAdapter(max_retries=retry_strategy)
762
- # get_session().mount("http://", adapter)
763
- # get_session().mount("https://", adapter)
764
-
765
- ai_builder_tab.launch()
 
 
 
 
 
 
 
 
 
 
14
  # --- Helper functions for Hugging Face integration ---
15
 
16
  def show_profile(profile: gr.OAuthProfile | None) -> str:
17
+ """Displays the logged-in Hugging Face profile username."""
18
  if profile is None:
19
  return "*Not logged in.*"
20
  return f"✅ Logged in as **{profile.username}**"
 
23
  profile: gr.OAuthProfile | None,
24
  oauth_token: gr.OAuthToken | None
25
  ) -> str:
26
+ """Lists private models for the logged-in user (not used in the main workflow, but kept)."""
27
  if profile is None or oauth_token is None:
28
  return "Please log in to see your models."
29
  try:
 
33
  ]
34
  return "No models found." if not models else "Models:\n\n" + "\n - ".join(models)
35
  except Exception as e:
36
+ # Catching generic exception is acceptable for helper functions
37
  return f"Error listing models: {e}"
38
 
39
  def create_space_action(repo_name: str, sdk: str, profile: gr.OAuthProfile, token: gr.OAuthToken):
40
+ """Creates a new Hugging Face Space repository."""
41
+ if not profile or not token:
42
+ raise ValueError("Hugging Face profile or token is missing.")
43
  repo_id = f"{profile.username}/{repo_name}"
44
  try:
45
  create_repo(
46
  repo_id=repo_id,
47
  token=token.token,
48
+ exist_ok=True, # Allow creating if it already exists
49
  repo_type="space",
50
  space_sdk=sdk
51
  )
 
56
  raise RuntimeError(f"Failed to create Space {repo_id}: {e}")
57
 
58
  def upload_file_to_space_action(
59
+ file_obj: io.StringIO, # Specify type hint for clarity
60
  path_in_repo: str,
61
  repo_id: str,
62
  profile: gr.OAuthProfile,
63
  token: gr.OAuthToken
64
  ) -> None:
65
+ """Uploads a file to a Hugging Face Space repository."""
66
  if not (profile and token and repo_id):
67
  raise ValueError("Hugging Face profile, token, or repo_id is missing.")
68
  try:
 
77
  raise RuntimeError(f"Failed to upload `{path_in_repo}` to {repo_id}: {e}")
78
 
79
  def _fetch_space_logs_level(repo_id: str, level: str, token: str) -> str:
80
+ """Fetches build or run logs for a Space."""
81
  if not repo_id or not token:
82
  return f"Cannot fetch {level} logs: repo_id or token missing."
83
  jwt_url = f"{constants.ENDPOINT}/api/spaces/{repo_id}/jwt"
84
  try:
85
  r = get_session().get(jwt_url, headers=build_hf_headers(token=token))
86
+ hf_raise_for_status(r) # Raise HTTPError for bad responses (4xx or 5xx)
87
  jwt = r.json()["token"]
88
  logs_url = f"https://api.hf.space/v1/{repo_id}/logs/{level}"
89
  lines, count = [], 0
90
+ # Using stream=True is good for potentially large logs
91
  with get_session().get(logs_url, headers=build_hf_headers(token=jwt), stream=True, timeout=30) as resp:
92
  hf_raise_for_status(resp)
93
  for raw in resp.iter_lines():
94
+ if count >= 200: # Limit output lines to prevent UI overload
95
  lines.append("... truncated ...")
96
  break
97
+ if not raw.startswith(b"data: "): # EventStream protocol expected from HF logs API
98
  continue
99
  payload = raw[len(b"data: "):]
100
  try:
 
105
  lines.append(f"[{ts}] {txt}")
106
  count += 1
107
  except json.JSONDecodeError:
108
+ # Skip lines that aren't valid JSON events
109
  continue
110
  return "\n".join(lines) if lines else f"No {level} logs found."
111
  except Exception as e:
112
+ # Catching generic exception is acceptable for helper functions
113
  return f"Error fetching {level} logs: {e}"
114
 
115
 
116
  def get_build_logs_action(repo_id, profile, token):
117
+ """Action to fetch build logs with a small delay."""
118
  if not (repo_id and profile and token):
119
  return "⚠️ Cannot fetch build logs: log in and create a Space first."
120
+ # Small delay to allow build process to potentially start on HF side
121
  time.sleep(5)
122
  return _fetch_space_logs_level(repo_id, "build", token.token)
123
 
124
  def get_container_logs_action(repo_id, profile, token):
125
+ """Action to fetch container logs with a delay."""
126
  if not (repo_id and profile and token):
127
  return "⚠️ Cannot fetch container logs: log in and create a Space first."
128
+ # Longer delay to allow container to start after build completes
129
  time.sleep(10)
130
  return _fetch_space_logs_level(repo_id, "run", token.token)
131
 
 
133
  # --- Google Gemini integration with model selection ---
134
 
135
  def configure_gemini(api_key: str | None, model_name: str | None) -> str:
136
+ """Configures the Gemini API and checks if the model is accessible."""
137
  if not api_key:
138
  return "Gemini API key is not set."
139
  if not model_name:
140
  return "Please select a Gemini model."
141
  try:
142
  genai.configure(api_key=api_key)
143
+ # Attempt a simple call to verify credentials and model availability
144
+ # This will raise an exception if the key is invalid or model not found
145
  genai.GenerativeModel(model_name).generate_content("ping", stream=False)
146
  return f"Gemini configured successfully with **{model_name}**."
147
  except Exception as e:
148
  return f"Error configuring Gemini: {e}"
149
 
150
  def call_gemini(prompt: str, api_key: str, model_name: str) -> str:
151
+ """Calls the Gemini API with a given prompt."""
152
  if not api_key or not model_name:
153
  raise ValueError("Gemini API key or model not set.")
154
  try:
155
  genai.configure(api_key=api_key)
156
  model = genai.GenerativeModel(model_name)
157
+ # Using generate_content and stream=False for simplicity here
158
  response = model.generate_content(prompt, stream=False)
159
+ return response.text or "" # Return empty string if no text
160
  except Exception as e:
161
+ # Re-raising as RuntimeError for the workflow to catch and manage
162
  raise RuntimeError(f"Gemini API call failed: {e}")
163
 
164
 
165
  # --- AI workflow logic (State Machine) ---
166
 
167
+ # Define States for the workflow
168
  STATE_IDLE = "idle"
169
  STATE_AWAITING_REPO_NAME = "awaiting_repo_name"
170
  STATE_CREATING_SPACE = "creating_space"
 
180
  STATE_UPLOADING_FIXED_APP_PY = "uploading_fixed_app_py"
181
  STATE_COMPLETE = "complete"
182
 
183
+ MAX_DEBUG_ATTEMPTS = 3 # Limit the number of automatic debug attempts
184
 
185
  def add_bot_message(history: list[dict], bot_message: str) -> list[dict]:
186
+ """Helper to add a new assistant message to the chatbot history."""
187
  history.append({"role": "assistant", "content": bot_message})
188
  return history
189
 
190
+ # This is the main generator function for the workflow, triggered by the 'Send' button
191
  def ai_workflow_chat(
192
  message: str,
193
  history: list[dict],
 
198
  repo_id_state: str | None,
199
  workflow_state: str,
200
  space_sdk: str,
201
+ preview_html: str, # Passed in to maintain its value in State
202
+ container_logs: str, # Passed in to maintain its value in State
203
+ build_logs: str, # Passed in to maintain its value in State
204
  debug_attempts_state: int,
205
  app_description_state: str | None,
206
  repo_name_state: str | None,
207
  generated_code_state: str | None,
208
+ # Absorb potential extra args passed by Gradio event listeners (e.g. old value, event data)
209
  *args,
210
  **kwargs
211
  ) -> tuple[
212
+ list[dict], # 0: Updated chat history
213
+ str | None, # 1: Updated repo_id
214
+ str, # 2: Updated workflow state
215
+ str, # 3: Updated iframe HTML
216
+ str, # 4: Updated container logs
217
+ str, # 5: Updated build logs
218
+ int, # 6: Updated debug attempts count
219
+ str | None, # 7: Updated app description
220
+ str | None, # 8: Updated repo name
221
+ str | None, # 9: Updated generated code (for temporary storage)
222
  ]:
223
+ """
224
+ Generator function to handle the AI workflow state machine.
225
+ Each 'yield' pauses execution and sends values to update Gradio outputs/state.
226
+ """
227
+ # Unpack state variables from Gradio State components passed as inputs
228
  repo_id = repo_id_state
229
  state = workflow_state
230
  attempts = debug_attempts_state
 
232
  repo_name = repo_name_state
233
  generated_code = generated_code_state
234
 
235
+ # Keep copies of potentially updated UI elements passed as inputs to update them later
236
  updated_preview = preview_html
237
  updated_build = build_logs
238
  updated_run = container_logs
239
 
240
+ # Add the user's message to the chat history immediately
241
  user_message_entry = {"role": "user", "content": message}
242
+ # Add username if logged in (optional, but nice)
243
  if hf_profile and hf_profile.username:
244
  user_message_entry["name"] = hf_profile.username
245
  history.append(user_message_entry)
246
 
247
+ # Yield immediately to update the chat UI with the user's message
248
+ # This provides immediate feedback to the user while the AI processes
249
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
250
+ attempts, app_desc, repo_name, generated_code)
251
 
252
  try:
253
+ # --- State Machine Logic based on the current 'state' variable ---
254
 
255
  if state == STATE_IDLE:
256
+ # Check prerequisites before starting any workflow actions
257
  if not (hf_profile and hf_token):
258
  history = add_bot_message(history, "Workflow paused: Please log in to Hugging Face first.")
259
+ # Yield updated history and current state, then exit for this click
260
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
261
+ attempts, app_desc, repo_name, generated_code)
262
+ return # Exit the generator for this click
263
+
264
  if not (gemini_api_key and gemini_model):
265
  history = add_bot_message(history, "Workflow paused: Please enter your API key and select a Gemini model.")
266
+ # Yield updated history and current state, then exit for this click
267
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
268
+ attempts, app_desc, repo_name, generated_code)
269
+ return # Exit the generator for this click
270
 
271
+ # Look for specific commands in the user's message
272
  reset_match = "reset" in message.lower()
273
+ # Capture app description AND repo name using regex
274
+ generate_match = re.search(r'generate (?:me )?(?:a|an) (.+) app called (\w+)', message, re.I)
275
+ # Capture repo name for a simple 'create space' command
276
  create_match = re.search(r'create (?:a|an)? space called (\w+)', message, re.I)
277
 
278
  if reset_match:
279
+ # Reset the workflow state and associated variables
280
  history = add_bot_message(history, "Workflow reset.")
281
+ # Yield updated history and reset state variables to their initial values
282
+ yield (history, None, STATE_IDLE, "<p>No Space created yet.</p>", "", "", 0,
283
+ None, None, None)
284
+ # No return needed after yield in this generator pattern; execution for this click ends here.
285
 
286
  elif generate_match:
287
+ # User requested generation with description and name
288
+ new_app_desc = generate_match.group(1).strip() # Capture description part
289
+ new_repo_name = generate_match.group(2).strip() # Capture name part
290
+ history = add_bot_message(history, f"Acknowledged: '{message}'. Starting workflow to create Space `{hf_profile.username}/{new_repo_name}` for a '{new_app_desc}' app.")
291
+ # Update state variables for the next step (creation)
292
  state = STATE_CREATING_SPACE
293
  repo_name = new_repo_name
294
  app_desc = new_app_desc
295
+ # Yield updated history and state variables
296
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
297
+ attempts, app_desc, repo_name, generated_code)
298
+ # No return needed
299
 
300
  elif create_match:
301
+ # User requested simple space creation with a name
302
+ new_repo_name = create_match.group(1).strip()
303
  history = add_bot_message(history, f"Acknowledged: '{message}'. Starting workflow to create Space `{hf_profile.username}/{new_repo_name}`.")
304
+ # Update state variables for the next step (creation)
305
  state = STATE_CREATING_SPACE
306
  repo_name = new_repo_name
307
+ # Yield updated history and state variables
308
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
309
+ attempts, app_desc, repo_name, generated_code)
310
+ # No return needed
311
 
312
  elif "create" in message.lower() and not repo_id:
313
+ # User wants to create but didn't specify a name yet
314
  history = add_bot_message(history, "Okay, what should the Space be called? (e.g., `my-awesome-app`)")
315
+ state = STATE_AWAITING_REPO_NAME # Transition to the state where we wait for the name
316
+ # Yield updated history and state
317
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
318
+ attempts, app_desc, repo_name, generated_code)
319
+ # No return needed
320
 
321
  else:
322
+ # Command not recognized in IDLE state
323
  history = add_bot_message(history, "Command not recognized. Try 'generate me a gradio app called myapp', or 'reset'.")
324
+ # Yield updated history and current state
325
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
326
+ attempts, app_desc, repo_name, generated_code)
327
+ # No return needed
328
 
329
 
330
+ elif state == STATE_AWAITING_REPO_NAME:
331
+ # User's message is expected to be the repo name
332
+ new_repo_name = message.strip()
333
+ # Basic validation for Hugging Face repo name format
334
+ if not new_repo_name or re.search(r'[^a-zA-Z0-9_-]', new_repo_name):
335
  history = add_bot_message(history, "Invalid name. Please provide a single word/slug for the Space name (letters, numbers, underscores, hyphens only).")
336
+ # Stay in AWAITING_REPO_NAME state and yield message
337
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
338
+ attempts, app_desc, repo_name, generated_code)
339
+ # No return needed
 
 
340
 
341
+ else:
342
+ history = add_bot_message(history, f"Using Space name `{new_repo_name}`. Creating Space `{hf_profile.username}/{new_repo_name}`...")
343
+ state = STATE_CREATING_SPACE # Transition state to creation
344
+ repo_name = new_repo_name # Store the validated repo name
345
+ # Yield updated history, state, and repo name.
346
+ # The next click will proceed from the STATE_CREATING_SPACE block.
347
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
348
+ attempts, app_desc, repo_name, generated_code)
349
+ # No return needed
350
+
351
+ # Note: Each 'elif' block below represents a distinct step in the workflow triggered
352
+ # when the 'state' variable matches its condition on a button click.
353
 
354
  elif state == STATE_CREATING_SPACE:
355
+ # Ensure repo_name is available (it should have been set in the previous step)
356
+ if not repo_name:
357
+ history = add_bot_message(history, "Internal error: Repo name missing for creation. Resetting.")
358
+ yield (history, None, STATE_IDLE, "<p>Error creating space.</p>", "", "", 0,
359
+ None, None, None)
360
+ # No return needed
 
 
 
 
 
 
361
 
362
+ else:
363
+ try:
364
+ # Perform the action to create the Space on Hugging Face
365
+ new_repo_id, iframe_html = create_space_action(repo_name, space_sdk, hf_profile, hf_token)
366
+ updated_preview = iframe_html # Update the iframe content to show the new space
367
+ repo_id = new_repo_id # Store the official repo_id
368
+ history = add_bot_message(history, f"✅ Space `{repo_id}` created. Click 'Send' to generate and upload code.")
369
+ state = STATE_GENERATING_CODE # Transition to the next state
370
+ # Yield updated state variables and history
371
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
372
+ attempts, app_desc, repo_name, generated_code)
373
+ # No return needed
374
+
375
+ except Exception as e:
376
+ history = add_bot_message(history, f"❌ Error creating space: {e}. Click 'reset'.")
377
+ # Yield error message and reset state on failure
378
+ yield (history, None, STATE_IDLE, "<p>Error creating space.</p>", "", "", 0,
379
+ None, None, None)
380
+ # No return needed
381
 
382
 
383
  elif state == STATE_GENERATING_CODE:
384
+ # Define the prompt for Gemini based on the app description or a default
385
+ prompt_desc = app_desc if app_desc else 'a Gradio image-blur test app with upload and slider controls'
386
+ prompt = f"""
387
  You are an AI assistant specializing in Hugging Face Spaces using the {space_sdk} SDK.
388
  Generate a full, single-file Python app based on:
389
  '{prompt_desc}'
390
  Return **only** the python code block for app.py. Do not include any extra text, explanations, or markdown outside the code block.
391
  """
392
+ try:
393
+ history = add_bot_message(history, "🧠 Generating `app.py` code with Gemini...")
394
+ # Yield to show message before the potentially time-consuming API call
395
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
396
+ attempts, app_desc, repo_name, generated_code)
397
+
398
+ # Perform the Gemini API call to generate code
399
+ code = call_gemini(prompt, gemini_api_key, gemini_model)
400
+ code = code.strip()
401
+ # Clean up common markdown code block formatting if present
402
+ if code.startswith("```python"):
403
+ code = code[len("```python"):].strip()
404
+ if code.endswith("```"):
405
+ code = code[:-len("```")].strip()
406
+
407
+ if not code:
408
+ raise ValueError("Gemini returned empty code.")
409
+
410
+ history = add_bot_message(history, "✅ `app.py` code generated. Click 'Send' to upload.")
411
+ state = STATE_UPLOADING_APP_PY # Transition to the upload state
412
+ generated_code = code # Store the generated code in the state variable for the next step
413
+ # Yield updated state variables and history
414
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
415
+ attempts, app_desc, repo_name, generated_code)
416
+ # No return needed
417
 
418
+ except Exception as e:
419
+ history = add_bot_message(history, f"❌ Error generating code: {e}. Click 'reset'.")
420
+ # Yield error message and reset state on failure
421
+ yield (history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0,
422
+ None, None, None)
423
+ # No return needed
424
 
425
 
426
  elif state == STATE_UPLOADING_APP_PY:
427
+ # Retrieve the generated code from the state variable
428
+ code_to_upload = generated_code
429
+ if not code_to_upload:
430
+ history = add_bot_message(history, "Internal error: No code to upload. Resetting.")
431
+ yield (history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0,
432
+ None, None, None)
433
+ # No return needed
434
 
435
+ else:
436
+ history = add_bot_message(history, "☁️ Uploading `app.py`...")
437
+ # Yield to show message before the upload action
438
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
439
+ attempts, app_desc, repo_name, generated_code)
440
+
441
+ try:
442
+ # Perform the file upload action
443
+ upload_file_to_space_action(io.StringIO(code_to_upload), "app.py", repo_id, hf_profile, hf_token)
444
+ history = add_bot_message(history, "✅ Uploaded `app.py`. Click 'Send' to generate requirements.")
445
+ state = STATE_GENERATING_REQUIREMENTS # Transition state
446
+ generated_code = None # Clear the stored code after use to free memory/state space
447
+ # Yield updated state variables and history
448
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
449
+ attempts, app_desc, repo_name, generated_code)
450
+ # No return needed
451
+
452
+ except Exception as e:
453
+ history = add_bot_message(history, f"❌ Error uploading app.py: {e}. Click 'reset'.")
454
+ # Yield error message and reset state on failure
455
+ yield (history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0,
456
+ None, None, None)
457
+ # No return needed
458
 
459
 
460
  elif state == STATE_GENERATING_REQUIREMENTS:
461
+ history = add_bot_message(history, "📄 Generating `requirements.txt`...")
462
+ # Yield to show message before generating requirements
463
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
464
+ attempts, app_desc, repo_name, generated_code)
465
+
466
+ # Logic to determine required packages based on SDK and keywords in the app description
467
+ reqs_list = ["gradio"] if space_sdk == "gradio" else ["streamlit"]
468
+ if app_desc: # Check app_desc for keywords only if it's not None
469
+ app_desc_lower = app_desc.lower()
470
+ if "google.generativeai" in app_desc_lower or "gemini" in app_desc_lower or gemini_api_key:
471
+ reqs_list.append("google-generativeai")
472
+ if "requests" in app_desc_lower:
473
+ reqs_list.append("requests")
474
+ # Add common libraries if description suggests they might be needed
475
+ if "image" in app_desc_lower or "upload" in app_desc_lower or "blur" in app_desc_lower or "vision" in app_desc_lower:
476
+ reqs_list.append("Pillow") # Pillow is a common image processing library
477
+ if "numpy" in app_desc_lower: reqs_list.append("numpy")
478
+ if "pandas" in app_desc_lower: reqs_list.append("pandas")
479
+ # Add scikit-image and opencv if image processing is heavily implied
480
+ if any(lib in app_desc_lower for lib in ["scikit-image", "skimage", "cv2", "opencv"]):
481
+ reqs_list.extend(["scikit-image", "opencv-python"]) # Note: opencv-python for pip
482
+
483
+ # Add essential libraries regardless of description keywords
484
+ reqs_list.append("huggingface_hub") # Needed for interaction helpers if used in app
485
+
486
+ # Use dict.fromkeys to get unique items while preserving insertion order (Python 3.7+)
487
+ reqs_list = list(dict.fromkeys(reqs_list))
488
+
489
+ reqs_content = "\n".join(reqs_list) + "\n"
490
+
491
+ history = add_bot_message(history, "✅ `requirements.txt` generated. Click 'Send' to upload.")
492
+ state = STATE_UPLOADING_REQUIREMENTS # Transition state
493
+ generated_code = reqs_content # Store requirements content
494
+ # Yield updated state variables and history
495
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
496
+ attempts, app_desc, repo_name, generated_code)
497
+ # No return needed
498
 
499
 
500
  elif state == STATE_UPLOADING_REQUIREMENTS:
501
+ # Retrieve requirements content from state variable
502
  reqs_content_to_upload = generated_code
503
  if not reqs_content_to_upload:
504
  history = add_bot_message(history, "Internal error: No requirements content to upload. Resetting.")
505
+ yield (history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0,
506
+ None, None, None)
507
+ # No return needed
 
 
 
 
 
 
 
 
 
508
 
509
+ else:
510
+ history = add_bot_message(history, "☁️ Uploading `requirements.txt`...")
511
+ # Yield to show message before upload
512
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
513
+ attempts, app_desc, repo_name, generated_code)
514
+
515
+ try:
516
+ # Perform requirements file upload
517
+ upload_file_to_space_action(io.StringIO(reqs_content_to_upload), "requirements.txt", repo_id, hf_profile, hf_token)
518
+ history = add_bot_message(history, "✅ Uploaded `requirements.txt`. Click 'Send' to generate README.")
519
+ state = STATE_GENERATING_README # Transition state
520
+ generated_code = None # Clear content after use
521
+ # Yield updated state variables and history
522
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
523
+ attempts, app_desc, repo_name, generated_code)
524
+ # No return needed
525
+
526
+ except Exception as e:
527
+ history = add_bot_message(history, f"❌ Error uploading requirements.txt: {e}. Click 'reset'.")
528
+ # Yield error message and reset state on failure
529
+ yield (history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0,
530
+ None, None, None)
531
+ # No return needed
532
 
533
  elif state == STATE_GENERATING_README:
534
  history = add_bot_message(history, "📝 Generating `README.md`...")
535
+ # Yield message before generating README
536
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
537
+ attempts, app_desc, repo_name, generated_code)
538
 
539
+ # Generate simple README content with Space metadata header
540
  readme_title = repo_name if repo_name else "My Awesome Space"
541
  readme_description = app_desc if app_desc else f"This Hugging Face Space hosts an AI-generated {space_sdk} application."
542
 
543
+ readme_content = f"""---
544
+ title: {readme_title}
545
+ emoji: 🚀
546
+ colorFrom: blue
547
+ colorTo: yellow
548
+ sdk: {space_sdk}
549
+ app_file: app.py
550
+ pinned: false
551
+ ---
552
+
553
+ # {readme_title}
554
+
555
+ {readme_description}
556
+
557
+ This Space was automatically generated by an AI workflow using Google Gemini and Gradio.
558
+ """ # Added Space metadata header and slightly improved content
559
 
560
  history = add_bot_message(history, "✅ `README.md` generated. Click 'Send' to upload.")
561
+ state = STATE_UPLOADING_README # Transition state
562
+ generated_code = readme_content # Store README content
563
+ # Yield updated state variables and history
564
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
565
+ attempts, app_desc, repo_name, generated_code)
566
+ # No return needed
567
 
568
 
569
  elif state == STATE_UPLOADING_README:
570
+ # Retrieve README content from state variable
571
  readme_content_to_upload = generated_code
572
  if not readme_content_to_upload:
573
  history = add_bot_message(history, "Internal error: No README content to upload. Resetting.")
574
+ yield (history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0,
575
+ None, None, None)
576
+ # No return needed
 
 
 
 
 
 
 
 
 
 
 
 
 
 
577
 
578
+ else:
579
+ history = add_bot_message(history, "☁️ Uploading `README.md`...")
580
+ # Yield message before upload
581
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
582
+ attempts, app_desc, repo_name, generated_code)
583
+
584
+ try:
585
+ # Perform README file upload
586
+ upload_file_to_space_action(io.StringIO(readme_content_to_upload), "README.md", repo_id, hf_profile, hf_token)
587
+ history = add_bot_message(history, "✅ Uploaded `README.md`. All files uploaded. Space is now building. Click 'Send' to check build logs.")
588
+ state = STATE_CHECKING_LOGS_BUILD # Transition to checking build logs
589
+ generated_code = None # Clear content after use
590
+ # Yield updated state variables and history
591
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
592
+ attempts, app_desc, repo_name, generated_code)
593
+ # No return needed
594
+
595
+ except Exception as e:
596
+ history = add_bot_message(history, f"❌ Error uploading README.md: {e}. Click 'reset'.")
597
+ # Yield error message and reset state on failure
598
+ yield (history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0,
599
+ None, None, None)
600
+ # No return needed
601
 
602
  elif state == STATE_CHECKING_LOGS_BUILD:
603
  history = add_bot_message(history, "🔍 Fetching build logs...")
604
+ # Yield message before fetching logs (which includes a delay)
605
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
606
+ attempts, app_desc, repo_name, generated_code)
607
 
608
+ # Fetch build logs from HF Space
609
  build_logs_text = get_build_logs_action(repo_id, hf_profile, hf_token)
610
+ updated_build = build_logs_text # Update the logs display variable
611
 
612
+ # Simple check for common error indicators in logs (case-insensitive)
613
+ if "error" in updated_build.lower() or "exception" in updated_build.lower() or "build failed" in updated_build.lower():
614
+ history = add_bot_message(history, "⚠️ Build logs indicate potential issues. Please inspect above. Click 'Send' to check container logs (app might still start despite build warnings).")
615
+ state = STATE_CHECKING_LOGS_RUN # Transition even on build error, to see if container starts
616
+ # Yield updated state, logs, and variables
617
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
618
+ attempts, app_desc, repo_name, generated_code)
619
+ # No return needed
620
 
621
  else:
622
  history = add_bot_message(history, "✅ Build logs fetched. Click 'Send' to check container logs.")
623
+ state = STATE_CHECKING_LOGS_RUN # Transition to next log check
624
+ # Yield updated state, logs, and variables
625
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
626
+ attempts, app_desc, repo_name, generated_code)
627
+ # No return needed
628
 
629
 
630
  elif state == STATE_CHECKING_LOGS_RUN:
631
  history = add_bot_message(history, "🔍 Fetching container logs...")
632
+ # Yield message before fetching logs (includes a delay)
633
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
634
+ attempts, app_desc, repo_name, generated_code)
635
 
636
+ # Fetch container logs from HF Space
637
  container_logs_text = get_container_logs_action(repo_id, hf_profile, hf_token)
638
+ updated_run = container_logs_text # Update the logs display variable
639
 
640
+ # Check for errors in run logs and if we have debug attempts left
641
+ if ("error" in updated_run.lower() or "exception" in updated_run.lower()) and attempts < MAX_DEBUG_ATTEMPTS:
642
+ attempts += 1 # Increment debug attempts counter
643
  history = add_bot_message(history, f"❌ Errors detected in container logs. Attempting debug fix #{attempts}/{MAX_DEBUG_ATTEMPTS}. Click 'Send' to proceed.")
644
+ state = STATE_DEBUGGING_CODE # Transition to the debugging state
645
+ # Yield updated state, logs, attempts, and variables
646
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
647
+ attempts, app_desc, repo_name, generated_code)
648
+ # No return needed
649
+
650
+ elif ("error" in updated_run.lower() or "exception" in updated_run.lower()) and attempts >= MAX_DEBUG_ATTEMPTS:
651
+ # Max debug attempts reached
652
  history = add_bot_message(history, f"❌ Errors detected in container logs. Max debug attempts ({MAX_DEBUG_ATTEMPTS}) reached. Please inspect logs manually or click 'reset'.")
653
+ state = STATE_COMPLETE # Workflow ends on failure after attempts
654
+ # Yield updated state, logs, attempts, and variables
655
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
656
+ attempts, app_desc, repo_name, generated_code)
657
+ # No return needed
658
 
659
  else:
660
+ # No significant errors found in logs, assume success
661
  history = add_bot_message(history, "✅ App appears to be running successfully! Check the iframe above. Click 'reset' to start a new project.")
662
+ state = STATE_COMPLETE # Workflow ends on success
663
+ # Yield updated state, logs, attempts, and variables
664
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
665
+ attempts, app_desc, repo_name, generated_code)
666
+ # No return needed
667
 
668
 
669
  elif state == STATE_DEBUGGING_CODE:
670
  history = add_bot_message(history, f"🧠 Calling Gemini to generate fix based on logs...")
671
+ # Yield message before Gemini API call
672
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
673
+ attempts, app_desc, repo_name, generated_code)
674
 
675
+ # Construct prompt for Gemini including the container logs
676
  debug_prompt = f"""
677
  You are debugging a {space_sdk} Space. The goal is to fix the code in `app.py` based on the container logs provided.
678
 
679
  Here are the container logs:
680
+ Use code with caution.
681
+ Python
682
  {updated_run}
 
683
  Generate the *complete, fixed* content for `app.py` based on these logs.
684
  Return **only** the python code block for app.py. Do not include any extra text, explanations, or markdown outside the code block.
685
  """
686
  try:
687
+ # Call Gemini to generate the corrected code
688
  fix_code = call_gemini(debug_prompt, gemini_api_key, gemini_model)
689
  fix_code = fix_code.strip()
690
+ # Clean up potential markdown formatting
691
  if fix_code.startswith("```python"):
692
  fix_code = fix_code[len("```python"):].strip()
693
  if fix_code.endswith("```"):
 
697
  raise ValueError("Gemini returned empty fix code.")
698
 
699
  history = add_bot_message(history, "✅ Fix code generated. Click 'Send' to upload.")
700
+ state = STATE_UPLOADING_FIXED_APP_PY # Transition to the upload state for the fix
701
+ generated_code = fix_code # Store the generated fix code
702
+ # Yield updated state, code, and variables
703
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
704
+ attempts, app_desc, repo_name, generated_code)
705
+ # No return needed
706
 
707
  except Exception as e:
708
  history = add_bot_message(history, f"❌ Error generating debug code: {e}. Click 'reset'.")
709
+ # Yield error message and reset state on failure
710
+ yield (history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0,
711
+ None, None, None)
712
+ # No return needed
713
 
714
  elif state == STATE_UPLOADING_FIXED_APP_PY:
715
+ # Retrieve the fixed code from the state variable
716
  fixed_code_to_upload = generated_code
717
  if not fixed_code_to_upload:
718
  history = add_bot_message(history, "Internal error: No fixed code available to upload. Resetting.")
719
+ yield (history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0,
720
+ None, None, None)
721
+ # No return needed
 
 
 
 
 
 
 
 
 
722
 
723
+ else:
724
+ history = add_bot_message(history, "☁️ Uploading fixed `app.py`...")
725
+ # Yield message before upload
726
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
727
+ attempts, app_desc, repo_name, generated_code)
728
+
729
+ try:
730
+ # Perform the upload of the fixed app.py
731
+ upload_file_to_space_action(io.StringIO(fixed_code_to_upload), "app.py", repo_id, hf_profile, hf_token)
732
+ history = add_bot_message(history, "✅ Fixed `app.py` uploaded. Space will rebuild. Click 'Send' to check logs again.")
733
+ state = STATE_CHECKING_LOGS_RUN # Go back to checking run logs after uploading the fix
734
+ generated_code = None # Clear code after use
735
+ # Yield updated state, code, and variables
736
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
737
+ attempts, app_desc, repo_name, generated_code)
738
+ # No return needed
739
+
740
+ except Exception as e:
741
+ history = add_bot_message(history, f"❌ Error uploading fixed app.py: {e}. Click 'reset'.")
742
+ # Yield error message and reset state on failure
743
+ yield (history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0,
744
+ None, None, None)
745
+ # No return needed
746
 
747
  elif state == STATE_COMPLETE:
748
+ # If in the complete state, the workflow is finished for this project.
749
+ # Subsequent clicks just add user messages; we simply yield the current state.
750
+ yield (history, repo_id, state, updated_preview, updated_run, updated_build,
751
+ attempts, app_desc, repo_name, generated_code)
752
+ # No return needed
753
 
754
 
755
  except Exception as e:
756
+ # This catches any unexpected errors that occur within any state's logic
757
  error_message = f"Workflow step failed unexpectedly ({state}): {e}. Click 'Send' to re-attempt this step or 'reset'."
758
  history = add_bot_message(history, error_message)
759
+ print(f"Critical Error in state {state}: {e}") # Log the error for debugging purposes
760
+ # Yield an error state and reset essential workflow variables on critical failure
761
+ yield (history, None, STATE_IDLE, updated_preview, updated_run, updated_build, 0,
762
+ None, None, None)
763
+ # No return needed after yield
764
 
765
 
766
  # --- Build the Gradio UI ---
767
 
768
  with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
769
+ # Gradio State variables - these persist their values across user interactions (clicks)
770
  hf_profile = gr.State(None)
771
  hf_token = gr.State(None)
772
  gemini_key = gr.State(None)
773
+ gemini_model = gr.State("gemini-1.5-flash") # Default selected model
774
+ repo_id = gr.State(None) # Stores the ID of the created Space
775
+ workflow = gr.State(STATE_IDLE) # Stores the current state of the AI workflow
776
+ sdk_state = gr.State("gradio") # Stores the selected Space SDK (Gradio or Streamlit)
777
+ debug_attempts = gr.State(0) # Counter for how many debugging attempts have been made
778
+ app_description = gr.State(None) # Stores the user's initial description of the desired app
779
+ repo_name_state = gr.State(None) # Stores the chosen repository name for the Space
780
+ generated_code_state = gr.State(None) # Temporary storage for generated file content (app.py, reqs, README)
781
 
782
  with gr.Row():
783
+ # Sidebar column for inputs and status displays
784
  with gr.Column(scale=1, min_width=300):
785
  gr.Markdown("## Hugging Face Login")
786
  login_status = gr.Markdown("*Not logged in.*")
787
+ # Hugging Face Login Button
788
  login_btn = gr.LoginButton(variant="huggingface")
789
 
790
+ # Initial load event to check login status (if cached)
791
  ai_builder_tab.load(show_profile, outputs=login_status)
792
+ # Update status display when login button reports success
793
  login_btn.click(show_profile, outputs=login_status)
794
 
795
  gr.Markdown("## Google AI Studio API Key")
796
+ # Textbox for Gemini API key. Read from environment variable if available.
797
+ gemini_input = gr.Textbox(
798
+ label="API Key",
799
+ type="password", # Hides input for security
800
+ interactive=True,
801
+ value=os.environ.get("GOOGLE_API_KEY") # Pre-fill if GOOGLE_API_KEY env var is set
802
+ )
803
+ gemini_status = gr.Markdown("") # Display Gemini configuration status
804
 
805
  gr.Markdown("## Gemini Model")
806
+ # Radio buttons to select the Gemini model
807
  model_selector = gr.Radio(
808
  choices=[
809
  ("Gemini 1.5 Flash", "gemini-1.5-flash"),
810
  ("Gemini 1.5 Pro", "gemini-1.5-pro"),
811
  ("Gemini 1.0 Pro", "gemini-1.0-pro"),
812
  ],
813
+ value="gemini-1.5-flash", # Default selection
814
  label="Select model",
815
  interactive=True
816
  )
817
 
818
+ # Configure Gemini status on initial load (if API key env var is set)
819
  ai_builder_tab.load(
820
  configure_gemini,
821
  inputs=[gemini_key, gemini_model],
 
823
  )
824
 
825
  gr.Markdown("## Space SDK")
826
+ # Radio buttons to select the Space SDK (Gradio or Streamlit)
827
  sdk_selector = gr.Radio(choices=["gradio","streamlit"], value="gradio", label="Template SDK", interactive=True)
828
+ # Update the sdk_state state variable when the selection changes
829
  sdk_selector.change(lambda s: s, inputs=sdk_selector, outputs=sdk_state)
830
 
831
  gr.Markdown("## Workflow Status")
832
+ # Textboxes to display the current workflow state and Space ID
833
  status_text = gr.Textbox(label="Current State", value=STATE_IDLE, interactive=False)
834
  repo_id_text = gr.Textbox(label="Current Space ID", value="None", interactive=False)
835
 
836
+ # --- Prerequisite Status Indicator ---
837
+ # Markdown to show if prerequisites (HF login, Gemini key) are met
838
+ prereq_status = gr.Markdown("Checking prerequisites...")
839
 
840
 
841
+ # Main content area column
842
  with gr.Column(scale=3):
843
+ # Chatbot to display the conversation and workflow messages
844
  chatbot = gr.Chatbot(type='messages', label="AI Workflow Chat")
845
+ # Textbox for user input messages
846
  user_input = gr.Textbox(placeholder="Type your message…", interactive=True)
847
+ # Button to send the user message and trigger the workflow step
848
+ send_btn = gr.Button("Send", interactive=False) # Starts disabled until prereqs are met
849
+
850
+ # Helper function to control send button interactivity and prerequisite status text
851
+ # This function is triggered by changes in login status and Gemini configuration
852
def update_send_button_state(
    profile: gr.OAuthProfile | None,
    token: gr.OAuthToken | None,
    key: str | None,
    model: str | None,
    # Absorb potential extra args Gradio passes to event handlers
    *args,
    **kwargs,
):
    """Decide whether the Send button is active and build the prerequisite status text.

    Returns a ``gr.update`` for the button's ``interactive`` property and a
    human-readable status string listing any missing prerequisites.

    Fix: the original computed ``is_gemini_ready`` with ``is not None`` checks
    while the warning messages used truthiness (``if not key``). A cleared
    textbox yields ``""`` (not ``None``), so the button could be enabled with
    an empty API key while the status simultaneously warned it was unset.
    Truthiness is now used consistently for both the readiness flag and the
    warnings.
    """
    is_logged_in = profile is not None and token is not None
    # Truthiness, not `is not None`: an empty string from a cleared textbox
    # must count as "not set", matching the warning checks below.
    is_gemini_ready = bool(key) and bool(model)

    status_parts = []
    if not is_logged_in:
        status_parts.append("⚠️ Not logged in to Hugging Face.")
    if not key:
        status_parts.append("⚠️ Gemini API key not set.")
    if not model:
        status_parts.append("⚠️ Gemini model not selected.")

    is_ready = is_logged_in and is_gemini_ready

    if is_ready:
        status_str = "✅ Ready to send commands."
    else:
        status_str = " ".join(status_parts)
        if not status_str:  # Fallback; unreachable when not is_ready, kept defensively
            status_str = "Checking prerequisites..."

    # gr.update dynamically changes the Send button's `interactive` property
    return gr.update(interactive=is_ready), status_str
885
 
886
+ # --- Implement Chained Events for Prerequisites ---
887
+ # Gradio's `.then()` allows chaining events: Action A happens, then Action B happens.
888
 
889
+ # 1. Login Button: When clicked and successful, update profile/token state,
890
+ # THEN update send button state based on all prereqs.
891
  login_btn.click(
892
+ # The LoginButton outputs a tuple (OAuthProfile, OAuthToken) on success
893
+ lambda x: (x[0], x[1]),
894
  inputs=[login_btn],
895
+ outputs=[hf_profile, hf_token] # Update these State variables
896
  ).then( # Chain the next action after state is updated
897
  update_send_button_state,
898
  inputs=[hf_profile, hf_token, gemini_key, gemini_model],
899
+ outputs=[send_btn, prereq_status] # Update button interactivity and status text
900
  )
901
 
902
+ # 2. Gemini Key Input: When text changes, update key state,
903
+ # THEN configure Gemini status, THEN update send button state.
904
+ # The Textbox 'change' event passes the new value as its input
905
  gemini_input.change(
906
+ lambda k: k, # Simple function to pass the new value to the state variable
907
  inputs=[gemini_input],
908
+ outputs=[gemini_key] # Update gemini_key state variable
909
+ ).then( # Chain configure_gemini after key state is updated
910
  configure_gemini,
911
  inputs=[gemini_key, gemini_model],
912
+ outputs=[gemini_status] # Update Gemini status text
913
  ).then( # Chain update_send_button_state after config status is updated
914
  update_send_button_state,
915
  inputs=[hf_profile, hf_token, gemini_key, gemini_model],
916
+ outputs=[send_btn, prereq_status] # Update button interactivity and status text
917
  )
918
 
919
+ # 3. Gemini Model Selector: When selection changes, update model state,
920
+ # THEN configure Gemini status, THEN update send button state.
921
+ # The Radio 'change' event passes the new value as its input
922
  model_selector.change(
923
+ lambda m: m, # Simple function to pass the new value to the state variable
924
  inputs=[model_selector],
925
+ outputs=[gemini_model] # Update gemini_model state variable
926
+ ).then( # Chain configure_gemini after model state is updated
927
  configure_gemini,
928
  inputs=[gemini_key, gemini_model],
929
+ outputs=[gemini_status] # Update Gemini status text
930
  ).then( # Chain update_send_button_state after config status is updated
931
  update_send_button_state,
932
  inputs=[hf_profile, hf_token, gemini_key, gemini_model],
933
+ outputs=[send_btn, prereq_status] # Update button interactivity and status text
934
  )
935
 
936
+ # 4. Initial Load: On page load, check prereqs and update send button/status.
937
+ # This accounts for cached logins or environment variables set before launch.
938
  ai_builder_tab.load(
939
  update_send_button_state,
940
  inputs=[hf_profile, hf_token, gemini_key, gemini_model],
941
+ outputs=[send_btn, prereq_status] # Update button interactivity and status text
942
  )
943
 
944
+ # UI elements to display the Space preview iframe and build/run logs
945
+ iframe = gr.HTML("<p>No Space created yet.</p>") # HTML element for the Space iframe
946
+ # Textboxes for logs, interactive=False means user can't type here
947
+ build_txt = gr.Textbox(label="Build Logs", lines=10, interactive=False, value="", max_lines=20) # Set max_lines for scrollability
948
+ run_txt = gr.Textbox(label="Container Logs", lines=10, interactive=False, value="", max_lines=20) # Set max_lines for scrollability
949
 
950
  # The main event handler for the Send button
951
+ # This .click() event triggers the ai_workflow_chat generator function
952
  send_btn.click(
953
+ ai_workflow_chat, # The generator function to run
954
+ # Inputs are read from UI components and State variables
955
  inputs=[
956
+ user_input, chatbot, # UI inputs (message, current chat history)
957
+ hf_profile, hf_token, # HF State variables
958
+ gemini_key, gemini_model, # Gemini State variables
959
+ repo_id, workflow, sdk_state, # Workflow State variables
960
+ iframe, run_txt, build_txt, # UI outputs whose current values are needed by the generator
961
+ debug_attempts, app_description, repo_name_state, generated_code_state # Other State variables
962
  ],
963
+ # Outputs are updated by the values yielded from the generator
964
  outputs=[
965
+ chatbot, # Update Chatbot with new messages
966
+ repo_id, workflow, # Update workflow State variables
967
+ iframe, run_txt, build_txt, # Update UI outputs
968
+ debug_attempts, app_description, repo_name_state, generated_code_state # Update other State variables
969
  ]
970
+ ).success( # Chain a .success() event to run *after* the .click() handler completes without error
971
+ # Clear the user input textbox after the message is sent and processed
972
  lambda: gr.update(value=""),
973
  inputs=None,
974
+ outputs=user_input # Update the user input textbox
975
  )
976
 
977
+ # Link State variables' changes to UI status displays (reactive updates)
978
+ # When the 'workflow' state variable changes, update the text in status_text
979
  workflow.change(lambda s: s, inputs=workflow, outputs=status_text)
980
+ # When the 'repo_id' state variable changes, update the text in repo_id_text
981
  repo_id.change(lambda r: r if r else "None", inputs=repo_id, outputs=repo_id_text)
982
 
983
 
984
+ # Add an initial welcome message to the chatbot when the UI loads
985
def greet():
    """Return the initial chat history: one assistant welcome message."""
    welcome_text = (
        "Welcome! Please log in to Hugging Face and provide your Google AI Studio "
        "API key to start building Spaces. Once ready, type 'generate me a gradio "
        "app called myapp' or 'create' to begin."
    )
    return [dict(role="assistant", content=welcome_text)]
987
 
 
989
 
990
 
991
if __name__ == "__main__":
    # Optional hardening (left disabled): retry transient HTTP failures from
    # huggingface_hub requests.
    # from requests.adapters import HTTPAdapter
    # from urllib3.util.retry import Retry
    # retry_strategy = Retry(total=5, backoff_factor=1, status_forcelist=[429, 500, 502, 503, 504])
    # adapter = HTTPAdapter(max_retries=retry_strategy)
    # session = get_session()  # session used internally by huggingface_hub
    # session.mount("http://", adapter)
    # session.mount("https://", adapter)

    # Gradio runtime configuration via environment variables:
    # cap upload size (covers files like app.py) and use a local scratch dir.
    os.environ["GRADIO_MAX_FILE_SIZE"] = "100MB"
    os.environ["GRADIO_TEMP_DIR"] = "./tmp"
    os.makedirs(os.environ["GRADIO_TEMP_DIR"], exist_ok=True)

    # Start the Gradio UI.
    ai_builder_tab.launch()