wuhp committed
Commit c338ef9 · verified · 1 Parent(s): 7f5a4d4

Update app.py

Files changed (1):
  app.py  +60 -55
app.py CHANGED
@@ -1,10 +1,29 @@
 import gradio as gr
 import json, time
-from huggingface_hub import create_repo, upload_file, constants
+from huggingface_hub import create_repo, upload_file, list_models, constants
 from huggingface_hub.utils import build_hf_headers, get_session, hf_raise_for_status
-from google import genai  # Gemini Python SDK
+from google import genai
 from google.genai.types import Tool, GenerateContentConfig, GoogleSearch
 
+# — USER INFO & MODEL LISTING —
+
+def show_profile(profile: gr.OAuthProfile | None) -> str:
+    if profile is None:
+        return "*Not logged in.*"
+    return f"✅ Logged in as **{profile.username}**"
+
+def list_private_models(
+    profile: gr.OAuthProfile | None,
+    oauth_token: gr.OAuthToken | None
+) -> str:
+    if profile is None or oauth_token is None:
+        return "Please log in to see your models."
+    models = [
+        f"{m.id} ({'private' if m.private else 'public'})"
+        for m in list_models(author=profile.username, token=oauth_token.token)
+    ]
+    return "No models found." if not models else "Models:\n\n" + "\n - ".join(models)
+
 # — HELPERS FOR HF SPACE LOGS —
 
 def _get_space_jwt(repo_id: str):
@@ -23,7 +42,9 @@ def fetch_logs(repo_id: str, level: str):
         if raw.startswith(b"data: "):
             try:
                 ev = json.loads(raw[len(b"data: "):].decode())
-                lines.append(f"[{ev.get('timestamp','')}] {ev.get('data','')}")
+                ts  = ev.get("timestamp","")
+                txt = ev.get("data","")
+                lines.append(f"[{ts}] {txt}")
             except:
                 continue
     return "\n".join(lines)
@@ -31,19 +52,18 @@ def fetch_logs(repo_id: str, level: str):
 # — CORE LOOP: send prompt & (iteratively) deploy —
 
 def handle_user_message(
-    history,          # list of {"role","content"} dicts
+    history,                     # list of {"role","content"} dicts
     sdk_choice: str,
     gemini_api_key: str,
     grounding_enabled: bool,
     profile: gr.OAuthProfile | None,
     oauth_token: gr.OAuthToken | None
 ):
-    # require login
     if profile is None or oauth_token is None:
         return history + [{"role":"assistant","content":"⚠️ Please log in first."}], "", "", "<p>No Space yet.</p>"
 
-    genai_client = genai.Client(api_key=gemini_api_key)
-    chat = [{
+    client = genai.Client(api_key=gemini_api_key)
+    chat = [{
         "role":"system",
         "content":(
             f"You are an AI assistant that writes a HuggingFace Space using the "
@@ -52,58 +72,46 @@ def handle_user_message(
         )
     }] + history
 
-    code_filename   = "app.py" if sdk_choice=="gradio" else "streamlit_app.py"
-    config_filename = "README.md"
-    reqs_filename   = "requirements.txt"
-    repo_id         = f"{profile.username}/{profile.username}-auto-space"
+    code_fn   = "app.py" if sdk_choice=="gradio" else "streamlit_app.py"
+    readme_fn = "README.md"
+    reqs_fn   = "requirements.txt"
+    repo_id   = f"{profile.username}/{profile.username}-auto-space"
 
     build_logs = run_logs = ""
     for _ in range(5):
-        # build tools for grounding
         tools = []
        if grounding_enabled:
             tools.append(Tool(google_search=GoogleSearch()))
-        config = GenerateContentConfig(tools=tools, response_modalities=["TEXT"])
+        cfg = GenerateContentConfig(tools=tools, response_modalities=["TEXT"])
 
-        # call Gemini
-        resp = genai_client.models.generate_content(
+        resp = client.models.generate_content(
             model="gemini-2.5-flash-preview-04-17",
             contents=[m["content"] for m in chat],
-            config=config
+            config=cfg
         )
-        ai_code = resp.text
-        chat.append({"role":"assistant", "content": ai_code})
-
-        # write code file
-        with open(code_filename, "w") as f:
-            f.write(ai_code)
+        code = resp.text
+        chat.append({"role":"assistant","content":code})
 
-        # write README.md with correct app_file
-        readme = f"""---
+        # write files
+        with open(code_fn, "w") as f: f.write(code)
+        with open(readme_fn, "w") as f:
+            f.write(f"""---
 title: Wuhp Auto Space
 emoji: 🐢
 colorFrom: red
 colorTo: pink
 sdk: {sdk_choice}
 sdk_version: 1.44.1
-app_file: {code_filename}
+app_file: {code_fn}
 pinned: false
 ---
 
 Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
-"""
-        with open(config_filename, "w") as f:
-            f.write(readme)
-
-        # write requirements.txt
-        if sdk_choice == "streamlit":
-            reqs = "streamlit\npandas\n"
-        else:
-            reqs = "gradio\npandas\n"
-        with open(reqs_filename, "w") as f:
-            f.write(reqs)
-
-        # create/update Space and upload all three files
+""")
+        reqs = "pandas\n" + ("streamlit\n" if sdk_choice=="streamlit" else "gradio\n")
+        with open(reqs_fn, "w") as f: f.write(reqs)
+
+        # deploy
         create_repo(
             repo_id=repo_id,
             token=oauth_token.token,
@@ -111,7 +119,7 @@ Check out the configuration reference at https://huggingface.co/docs/hub/spaces-
             repo_type="space",
             space_sdk=sdk_choice
         )
-        for fn in (code_filename, config_filename, reqs_filename):
+        for fn in (code_fn, readme_fn, reqs_fn):
             upload_file(
                 path_or_fileobj=fn,
                 path_in_repo=fn,
@@ -120,15 +128,12 @@ Check out the configuration reference at https://huggingface.co/docs/hub/spaces-
                 repo_type="space"
             )
 
-        # fetch logs
         build_logs = fetch_logs(repo_id, "build")
         run_logs   = fetch_logs(repo_id, "run")
 
-        # stop if no errors
        if "ERROR" not in build_logs.upper() and "ERROR" not in run_logs.upper():
             break
 
-        # feed errors back
         chat.append({
             "role":"user",
             "content":(
@@ -139,25 +144,25 @@ Check out the configuration reference at https://huggingface.co/docs/hub/spaces-
         })
         time.sleep(2)
 
-    # prepare outputs for gr.Chatbot(type="messages")
-    messages = [{"role":m["role"], "content":m["content"]} for m in chat if m["role"]!="system"]
+    messages = [{"role":m["role"],"content":m["content"]} for m in chat if m["role"]!="system"]
     iframe = f'<iframe src="https://huggingface.co/spaces/{repo_id}" width="100%" height="500px"></iframe>'
     return messages, build_logs, run_logs, iframe
 
 # — BUILD THE UI —
 
-def show_profile(profile):
-    return f"✅ Logged in as **{profile.username}**" if profile else "*Not logged in.*"
-
 with gr.Blocks(title="HF Space Auto‑Builder (Gradio & Streamlit)") as demo:
     gr.Markdown("## Sign in + Auto‑Build Spaces\n\n"
-                "1. Sign in\n2. Enter your prompt\n3. Watch the code, README, and requirements deploy & debug\n\n---")
+        "1. Sign in\n2. Enter your prompt\n3. Watch code, README, requirements, logs, and preview\n\n---")
+
+    # LOGIN & MODEL LISTING
+    login_btn = gr.LoginButton(variant="huggingface", size="lg")
+    status_md = gr.Markdown("*Not logged in.*")
+    models_md = gr.Markdown()
 
-    # LOGIN
-    login_btn = gr.LoginButton(variant="huggingface", size="lg")
-    status_md = gr.Markdown("*Not logged in.*")
-    demo.load(show_profile, inputs=None, outputs=status_md)
-    login_btn.click(show_profile, inputs=None, outputs=status_md)
+    demo.load(show_profile, inputs=[login_btn], outputs=status_md)
+    demo.load(list_private_models, inputs=[login_btn], outputs=models_md)
+    login_btn.click(show_profile, inputs=[login_btn], outputs=status_md)
+    login_btn.click(list_private_models, inputs=[login_btn], outputs=models_md)
 
     # CONTROLS
     sdk_choice = gr.Radio(["gradio","streamlit"], value="gradio", label="SDK template")
@@ -166,7 +171,7 @@ with gr.Blocks(title="HF Space Auto‑Builder (Gradio & Streamlit)") as demo:
 
     # CHAT + OUTPUTS
     chatbot = gr.Chatbot(type="messages")
-    user_in = gr.Textbox(placeholder="e.g. Create me a CSV inspector…", label="Prompt")
+    user_in = gr.Textbox(placeholder="Your prompt…", label="Prompt")
     send_btn = gr.Button("Send")
 
     build_box = gr.Textbox(label="Build logs", lines=5, interactive=False)
@@ -175,7 +180,7 @@ with gr.Blocks(title="HF Space Auto‑Builder (Gradio & Streamlit)") as demo:
 
     send_btn.click(
         fn=handle_user_message,
-        inputs=[chatbot, sdk_choice, api_key, grounding, gr.State(), gr.State()],
+        inputs=[chatbot, sdk_choice, api_key, grounding, login_btn],
         outputs=[chatbot, build_box, run_box, preview]
     )
 
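
For reference, the new list_private_models helper added in this commit is a thin wrapper around huggingface_hub.list_models. Below is a minimal standalone sketch of the same call outside Gradio; the username, token value, and the summarize_models function name are illustrative placeholders, not values taken from this Space.

from huggingface_hub import list_models

def summarize_models(username: str, token: str) -> str:
    # list_models(author=..., token=...) yields ModelInfo objects; with the
    # owner's token the listing should also include their private repos.
    entries = [
        f"{m.id} ({'private' if m.private else 'public'})"
        for m in list_models(author=username, token=token)
    ]
    return "No models found." if not entries else "Models:\n - " + "\n - ".join(entries)

# Hypothetical usage:
# print(summarize_models("some-user", "hf_..."))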