wuhp committed on
Commit
d601a02
·
verified ·
1 Parent(s): ed27e39

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +21 -15
app.py CHANGED
@@ -6,7 +6,7 @@ from huggingface_hub.utils import build_hf_headers, get_session, hf_raise_for_st
6
  from google import genai
7
  from google.genai.types import Tool, GenerateContentConfig, GoogleSearch
8
 
9
- # — JSON spec model
10
 
11
  class RepoSpec(BaseModel):
12
  repo_name: str
@@ -52,7 +52,7 @@ def fetch_logs(repo_id: str, level: str):
52
  continue
53
  return "\n".join(lines)
54
 
55
- # — CORE LOOP: ask LLM for structured JSON, write & deploy
56
 
57
  def handle_user_message(
58
  history, # list of {"role","content"} dicts
@@ -63,13 +63,13 @@ def handle_user_message(
63
  profile: gr.OAuthProfile | None,
64
  oauth_token: gr.OAuthToken | None
65
  ):
66
- # require login
67
  if not (profile and oauth_token):
68
  return history + [{"role":"assistant","content":"⚠️ Please log in first."}], "", "", "<p>No Space yet.</p>"
69
 
70
  client = genai.Client(api_key=gemini_api_key)
71
 
72
- # system instructions
73
  system_msg = {
74
  "role":"system",
75
  "content":(
@@ -84,14 +84,14 @@ def handle_user_message(
84
  )
85
  }
86
 
87
- # build the chat context
88
  chat = [system_msg] + history + [{"role":"user", "content":user_prompt}]
89
 
90
  repo_id = None
91
  build_logs = run_logs = ""
92
 
93
  for _ in range(5):
94
- # detect sdk_version at runtime
95
  if sdk_choice == "gradio":
96
  import gradio as _gr; sdk_version = _gr.__version__
97
  else:
@@ -105,7 +105,7 @@ def handle_user_message(
105
  response_schema=RepoSpec
106
  )
107
 
108
- # call the LLM
109
  resp = client.models.generate_content(
110
  model="gemini-2.5-flash-preview-04-17",
111
  contents=[m["content"] for m in chat],
@@ -115,7 +115,7 @@ def handle_user_message(
115
  repo_name = spec.repo_name
116
  files = spec.files
117
 
118
- # prepare repo
119
  repo_id = f"{profile.username}/{repo_name}"
120
  create_repo(
121
  repo_id=repo_id,
@@ -125,9 +125,9 @@ def handle_user_message(
125
  space_sdk=sdk_choice
126
  )
127
 
128
- # write & upload all files
129
  for fn, content in files.items():
130
- # replace placeholder in README
131
  if fn.lower() == "readme.md":
132
  content = content.replace("<SDK_VERSION>", sdk_version)
133
  with open(fn, "w") as f:
@@ -140,13 +140,15 @@ def handle_user_message(
140
  repo_type="space"
141
  )
142
 
143
- # fetch logs
144
  build_logs = fetch_logs(repo_id, "build")
145
  run_logs = fetch_logs(repo_id, "run")
 
 
146
  if "ERROR" not in build_logs.upper() and "ERROR" not in run_logs.upper():
147
  break
148
 
149
- # feed errors back
150
  chat.append({
151
  "role":"user",
152
  "content":(
@@ -157,6 +159,7 @@ def handle_user_message(
157
  })
158
  time.sleep(2)
159
 
 
160
  messages = [{"role":m["role"], "content":m["content"]} for m in chat if m["role"]!="system"]
161
  iframe = f'<iframe src="https://huggingface.co/spaces/{repo_id}" width="100%" height="500px"></iframe>'
162
  return messages, build_logs, run_logs, iframe
@@ -165,10 +168,11 @@ def handle_user_message(
165
 
166
  with gr.Blocks(title="HF Space Auto‑Builder") as demo:
167
  gr.Markdown("## Sign in + Auto‑Build Spaces\n\n"
168
- "1. Sign in  2. Prompt  3. Deploy & Debug\n\n"
169
- "_LLM controls filenames, code, README, requirements, and iterates until successful._\n\n---")
170
 
171
- login_btn = gr.LoginButton("huggingface", size="lg")
 
172
  status_md = gr.Markdown("*Not logged in.*")
173
  models_md = gr.Markdown()
174
  demo.load(show_profile, None, status_md)
@@ -176,10 +180,12 @@ with gr.Blocks(title="HF Space Auto‑Builder") as demo:
176
  login_btn.click(show_profile, None, status_md)
177
  login_btn.click(list_private_models, None, models_md)
178
 
 
179
  sdk_choice = gr.Radio(["gradio","streamlit"], "gradio", label="SDK")
180
  api_key = gr.Textbox(label="Gemini API Key", type="password")
181
  grounding = gr.Checkbox(label="Enable grounding")
182
 
 
183
  chatbot = gr.Chatbot(type="messages")
184
  user_in = gr.Textbox(label="Prompt", placeholder="e.g. Build a CSV inspector…")
185
  send_btn = gr.Button("Send")
 
6
  from google import genai
7
  from google.genai.types import Tool, GenerateContentConfig, GoogleSearch
8
 
9
+ # — JSON SPEC MODEL
10
 
11
  class RepoSpec(BaseModel):
12
  repo_name: str
 
52
  continue
53
  return "\n".join(lines)
54
 
55
+ # — CORE LOOP: ASK LLM FOR STRUCTURED JSON, WRITE & DEPLOY
56
 
57
  def handle_user_message(
58
  history, # list of {"role","content"} dicts
 
63
  profile: gr.OAuthProfile | None,
64
  oauth_token: gr.OAuthToken | None
65
  ):
66
+ # Require login
67
  if not (profile and oauth_token):
68
  return history + [{"role":"assistant","content":"⚠️ Please log in first."}], "", "", "<p>No Space yet.</p>"
69
 
70
  client = genai.Client(api_key=gemini_api_key)
71
 
72
+ # System instructions
73
  system_msg = {
74
  "role":"system",
75
  "content":(
 
84
  )
85
  }
86
 
87
+ # Build chat context
88
  chat = [system_msg] + history + [{"role":"user", "content":user_prompt}]
89
 
90
  repo_id = None
91
  build_logs = run_logs = ""
92
 
93
  for _ in range(5):
94
+ # Detect SDK version at runtime
95
  if sdk_choice == "gradio":
96
  import gradio as _gr; sdk_version = _gr.__version__
97
  else:
 
105
  response_schema=RepoSpec
106
  )
107
 
108
+ # Call the LLM
109
  resp = client.models.generate_content(
110
  model="gemini-2.5-flash-preview-04-17",
111
  contents=[m["content"] for m in chat],
 
115
  repo_name = spec.repo_name
116
  files = spec.files
117
 
118
+ # Prepare repo
119
  repo_id = f"{profile.username}/{repo_name}"
120
  create_repo(
121
  repo_id=repo_id,
 
125
  space_sdk=sdk_choice
126
  )
127
 
128
+ # Write & upload all files
129
  for fn, content in files.items():
130
+ # Replace placeholder in README.md
131
  if fn.lower() == "readme.md":
132
  content = content.replace("<SDK_VERSION>", sdk_version)
133
  with open(fn, "w") as f:
 
140
  repo_type="space"
141
  )
142
 
143
+ # Fetch logs
144
  build_logs = fetch_logs(repo_id, "build")
145
  run_logs = fetch_logs(repo_id, "run")
146
+
147
+ # Stop if clean
148
  if "ERROR" not in build_logs.upper() and "ERROR" not in run_logs.upper():
149
  break
150
 
151
+ # Feed errors back
152
  chat.append({
153
  "role":"user",
154
  "content":(
 
159
  })
160
  time.sleep(2)
161
 
162
+ # Prepare UI outputs
163
  messages = [{"role":m["role"], "content":m["content"]} for m in chat if m["role"]!="system"]
164
  iframe = f'<iframe src="https://huggingface.co/spaces/{repo_id}" width="100%" height="500px"></iframe>'
165
  return messages, build_logs, run_logs, iframe
 
168
 
169
  with gr.Blocks(title="HF Space Auto‑Builder") as demo:
170
  gr.Markdown("## Sign in + Auto‑Build Spaces\n\n"
171
+ "1. Sign in   2. Prompt   3. Deploy & Debug\n\n"
172
+ "_The LLM controls filenames, code, README, requirements, and loops until successful._\n\n---")
173
 
174
+ # Login
175
+ login_btn = gr.LoginButton(variant="huggingface", size="lg")
176
  status_md = gr.Markdown("*Not logged in.*")
177
  models_md = gr.Markdown()
178
  demo.load(show_profile, None, status_md)
 
180
  login_btn.click(show_profile, None, status_md)
181
  login_btn.click(list_private_models, None, models_md)
182
 
183
+ # Controls
184
  sdk_choice = gr.Radio(["gradio","streamlit"], "gradio", label="SDK")
185
  api_key = gr.Textbox(label="Gemini API Key", type="password")
186
  grounding = gr.Checkbox(label="Enable grounding")
187
 
188
+ # Chat + Outputs
189
  chatbot = gr.Chatbot(type="messages")
190
  user_in = gr.Textbox(label="Prompt", placeholder="e.g. Build a CSV inspector…")
191
  send_btn = gr.Button("Send")