Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -104,30 +104,35 @@ def get_container_logs_action(repo_id, profile, token):
|
|
104 |
return "⚠️ Please log in and create a Space first."
|
105 |
return _fetch_space_logs_level(repo_id, "run", token.token)
|
106 |
|
107 |
-
# --- Google Gemini integration ---
|
108 |
|
109 |
-
|
|
|
|
|
110 |
if not api_key:
|
111 |
return "Gemini API key is not set."
|
|
|
|
|
112 |
try:
|
113 |
genai.configure(api_key=api_key)
|
114 |
-
|
115 |
-
|
|
|
116 |
except Exception as e:
|
117 |
-
return f"Error configuring Gemini: {e}
|
118 |
|
119 |
-
def call_gemini(prompt: str, api_key: str) -> str:
|
120 |
-
if not api_key:
|
121 |
-
return "Error: Gemini API key not provided."
|
122 |
try:
|
123 |
genai.configure(api_key=api_key)
|
124 |
-
model = genai.GenerativeModel(
|
125 |
response = model.generate_content(prompt)
|
126 |
return response.text or "Gemini returned an empty response."
|
127 |
except Exception as e:
|
128 |
-
return f"Error calling Gemini API: {e}"
|
|
|
129 |
|
130 |
-
# --- AI workflow logic (
|
131 |
|
132 |
def ai_workflow_chat(
|
133 |
message: str,
|
@@ -135,6 +140,7 @@ def ai_workflow_chat(
|
|
135 |
hf_profile: gr.OAuthProfile | None,
|
136 |
hf_token: gr.OAuthToken | None,
|
137 |
gemini_api_key: str | None,
|
|
|
138 |
repo_id_state: str | None,
|
139 |
workflow_state: str,
|
140 |
space_sdk: str,
|
@@ -149,18 +155,36 @@ def ai_workflow_chat(
|
|
149 |
str,
|
150 |
str
|
151 |
]:
|
152 |
-
#
|
153 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
154 |
|
155 |
# --- Build the Gradio UI ---
|
156 |
|
157 |
with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
|
158 |
-
hf_profile
|
159 |
-
hf_token
|
160 |
-
gemini_key
|
161 |
-
|
162 |
-
|
163 |
-
|
|
|
164 |
|
165 |
with gr.Row():
|
166 |
# Sidebar
|
@@ -169,23 +193,47 @@ with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
|
|
169 |
login_status = gr.Markdown("*Not logged in.*")
|
170 |
login_btn = gr.LoginButton(variant="huggingface")
|
171 |
|
172 |
-
#
|
173 |
ai_builder_tab.load(show_profile, outputs=login_status)
|
174 |
login_btn.click(show_profile, outputs=login_status)
|
175 |
-
|
176 |
-
|
177 |
-
lambda profile, token: (profile, token),
|
178 |
-
outputs=[hf_profile, hf_token]
|
179 |
-
)
|
180 |
|
181 |
gr.Markdown("## Google AI Studio API Key")
|
182 |
gemini_input = gr.Textbox(label="API Key", type="password")
|
183 |
gemini_status = gr.Markdown("")
|
184 |
-
gemini_input.change(lambda k: k, inputs=gemini_input, outputs=gemini_key)
|
185 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
186 |
|
187 |
gr.Markdown("## Space SDK")
|
188 |
-
sdk_selector = gr.Radio(choices=["gradio",
|
189 |
sdk_selector.change(lambda s: s, inputs=sdk_selector, outputs=sdk_state)
|
190 |
|
191 |
# Main content
|
@@ -194,20 +242,25 @@ with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
|
|
194 |
user_input = gr.Textbox(placeholder="Type your message…")
|
195 |
send_btn = gr.Button("Send", interactive=False)
|
196 |
|
197 |
-
# enable send only
|
198 |
ai_builder_tab.load(
|
199 |
-
lambda p, k: gr.update(interactive=bool(p and k)),
|
200 |
-
inputs=[hf_profile, gemini_key],
|
201 |
outputs=[send_btn]
|
202 |
)
|
203 |
login_btn.click(
|
204 |
-
lambda p, k: gr.update(interactive=bool(p and k)),
|
205 |
-
inputs=[hf_profile, gemini_key],
|
206 |
outputs=[send_btn]
|
207 |
)
|
208 |
gemini_input.change(
|
209 |
-
lambda p, k: gr.update(interactive=bool(p and k)),
|
210 |
-
inputs=[hf_profile, gemini_key],
|
|
|
|
|
|
|
|
|
|
|
211 |
outputs=[send_btn]
|
212 |
)
|
213 |
|
@@ -215,130 +268,31 @@ with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
|
|
215 |
build_txt = gr.Textbox(label="Build Logs", lines=10, interactive=False)
|
216 |
run_txt = gr.Textbox(label="Container Logs", lines=10, interactive=False)
|
217 |
|
218 |
-
def wrap_chat(msg, history, prof, tok, key, rid, wf, sdk, prev, run_l, build_l):
|
219 |
-
|
220 |
-
msg, history, prof, tok, key, rid, wf, sdk, prev, run_l, build_l
|
221 |
)
|
222 |
-
|
|
|
223 |
|
224 |
send_btn.click(
|
225 |
wrap_chat,
|
226 |
inputs=[
|
227 |
user_input, chatbot,
|
228 |
-
hf_profile, hf_token,
|
229 |
-
|
230 |
-
|
|
|
231 |
],
|
232 |
outputs=[
|
233 |
chatbot,
|
234 |
-
repo_id,
|
235 |
-
iframe,
|
236 |
]
|
237 |
)
|
238 |
|
239 |
with gr.Blocks(title="Manual Hugging Face Space Manager") as manual_control_tab:
|
240 |
-
|
241 |
-
manual_token = gr.State(None)
|
242 |
-
manual_repo = gr.State(None)
|
243 |
-
|
244 |
-
gr.Markdown("## Manual Sign-In & Space Management")
|
245 |
-
manual_login_btn = gr.LoginButton(variant="huggingface", size="lg")
|
246 |
-
manual_status = gr.Markdown("*Not logged in.*")
|
247 |
-
manual_models = gr.Markdown()
|
248 |
-
|
249 |
-
# init and update manual login status & model list
|
250 |
-
manual_control_tab.load(show_profile, outputs=manual_status)
|
251 |
-
manual_login_btn.click(show_profile, outputs=manual_status)
|
252 |
-
manual_control_tab.load(list_private_models, outputs=manual_models)
|
253 |
-
manual_login_btn.click(list_private_models, outputs=manual_models)
|
254 |
-
|
255 |
-
# store profile & token
|
256 |
-
manual_login_btn.click(
|
257 |
-
lambda profile, token: (profile, token),
|
258 |
-
outputs=[manual_profile, manual_token]
|
259 |
-
)
|
260 |
-
|
261 |
-
# Create Space
|
262 |
-
manual_repo_name = gr.Textbox(label="New Space name", placeholder="my-space")
|
263 |
-
manual_sdk_sel = gr.Radio(choices=["gradio","streamlit"], value="gradio", label="Template SDK")
|
264 |
-
manual_create_btn = gr.Button("Create Space", interactive=False)
|
265 |
-
manual_create_logs = gr.Textbox(label="Create Logs", lines=3, interactive=False)
|
266 |
-
manual_preview = gr.HTML("<p>No Space created yet.</p>")
|
267 |
-
|
268 |
-
# enable create once logged in
|
269 |
-
manual_control_tab.load(
|
270 |
-
lambda p, t: gr.update(interactive=bool(p and t)),
|
271 |
-
inputs=[manual_profile, manual_token],
|
272 |
-
outputs=[manual_create_btn]
|
273 |
-
)
|
274 |
-
manual_login_btn.click(
|
275 |
-
lambda p, t: gr.update(interactive=bool(p and t)),
|
276 |
-
inputs=[manual_profile, manual_token],
|
277 |
-
outputs=[manual_create_btn]
|
278 |
-
)
|
279 |
-
|
280 |
-
manual_create_btn.click(
|
281 |
-
create_space_action,
|
282 |
-
inputs=[manual_repo_name, manual_sdk_sel, manual_profile, manual_token],
|
283 |
-
outputs=[manual_repo, manual_preview]
|
284 |
-
).then(
|
285 |
-
lambda x: "",
|
286 |
-
outputs=[manual_create_logs]
|
287 |
-
)
|
288 |
-
|
289 |
-
# Upload file
|
290 |
-
manual_path = gr.Textbox(label="Path in Space", value="app.py")
|
291 |
-
manual_file = gr.File(label="Select file")
|
292 |
-
manual_up_btn = gr.Button("Upload File", interactive=False)
|
293 |
-
manual_up_log = gr.Textbox(label="Upload Logs", lines=2, interactive=False)
|
294 |
-
|
295 |
-
# enable upload when repo exists
|
296 |
-
for comp in (manual_up_btn,):
|
297 |
-
manual_control_tab.load(
|
298 |
-
lambda rid, p, t: gr.update(interactive=bool(rid and p and t)),
|
299 |
-
inputs=[manual_repo, manual_profile, manual_token],
|
300 |
-
outputs=[comp]
|
301 |
-
)
|
302 |
-
manual_login_btn.click(
|
303 |
-
lambda rid, p, t: gr.update(interactive=bool(rid and p and t)),
|
304 |
-
inputs=[manual_repo, manual_profile, manual_token],
|
305 |
-
outputs=[comp]
|
306 |
-
)
|
307 |
-
|
308 |
-
manual_up_btn.click(
|
309 |
-
upload_file_to_space_action,
|
310 |
-
inputs=[manual_file, manual_path, manual_repo, manual_profile, manual_token],
|
311 |
-
outputs=[manual_up_log]
|
312 |
-
)
|
313 |
-
|
314 |
-
# Fetch logs
|
315 |
-
manual_build_btn = gr.Button("Get Build Logs", interactive=False)
|
316 |
-
manual_container_btn = gr.Button("Get Container Logs", interactive=False)
|
317 |
-
manual_build_txt = gr.Textbox(label="Build Logs", lines=10, interactive=False)
|
318 |
-
manual_container_txt = gr.Textbox(label="Container Logs", lines=10, interactive=False)
|
319 |
-
|
320 |
-
for btn in (manual_build_btn, manual_container_btn):
|
321 |
-
manual_control_tab.load(
|
322 |
-
lambda rid, p, t: gr.update(interactive=bool(rid and p and t)),
|
323 |
-
inputs=[manual_repo, manual_profile, manual_token],
|
324 |
-
outputs=[btn]
|
325 |
-
)
|
326 |
-
manual_login_btn.click(
|
327 |
-
lambda rid, p, t: gr.update(interactive=bool(rid and p and t)),
|
328 |
-
inputs=[manual_repo, manual_profile, manual_token],
|
329 |
-
outputs=[btn]
|
330 |
-
)
|
331 |
-
|
332 |
-
manual_build_btn.click(
|
333 |
-
get_build_logs_action,
|
334 |
-
inputs=[manual_repo, manual_profile, manual_token],
|
335 |
-
outputs=[manual_build_txt]
|
336 |
-
)
|
337 |
-
manual_container_btn.click(
|
338 |
-
get_container_logs_action,
|
339 |
-
inputs=[manual_repo, manual_profile, manual_token],
|
340 |
-
outputs=[manual_container_txt]
|
341 |
-
)
|
342 |
|
343 |
demo = gr.TabbedInterface(
|
344 |
[ai_builder_tab, manual_control_tab],
|
|
|
104 |
return "⚠️ Please log in and create a Space first."
|
105 |
return _fetch_space_logs_level(repo_id, "run", token.token)
|
106 |
|
|
|
107 |
|
108 |
+
# --- Google Gemini integration with model selection ---
|
109 |
+
|
110 |
+
def configure_gemini(api_key: str | None, model_name: str | None) -> str:
|
111 |
if not api_key:
|
112 |
return "Gemini API key is not set."
|
113 |
+
if not model_name:
|
114 |
+
return "Please select a Gemini model."
|
115 |
try:
|
116 |
genai.configure(api_key=api_key)
|
117 |
+
# Test using the selected model
|
118 |
+
genai.GenerativeModel(model_name).generate_content("ping")
|
119 |
+
return f"Gemini configured successfully with **{model_name}**."
|
120 |
except Exception as e:
|
121 |
+
return f"Error configuring Gemini: {e}"
|
122 |
|
123 |
+
def call_gemini(prompt: str, api_key: str, model_name: str) -> str:
|
124 |
+
if not api_key or not model_name:
|
125 |
+
return "Error: Gemini API key or model not provided."
|
126 |
try:
|
127 |
genai.configure(api_key=api_key)
|
128 |
+
model = genai.GenerativeModel(model_name)
|
129 |
response = model.generate_content(prompt)
|
130 |
return response.text or "Gemini returned an empty response."
|
131 |
except Exception as e:
|
132 |
+
return f"Error calling Gemini API with {model_name}: {e}"
|
133 |
+
|
134 |
|
135 |
+
# --- AI workflow logic (uses selected model) ---
|
136 |
|
137 |
def ai_workflow_chat(
|
138 |
message: str,
|
|
|
140 |
hf_profile: gr.OAuthProfile | None,
|
141 |
hf_token: gr.OAuthToken | None,
|
142 |
gemini_api_key: str | None,
|
143 |
+
gemini_model: str | None,
|
144 |
repo_id_state: str | None,
|
145 |
workflow_state: str,
|
146 |
space_sdk: str,
|
|
|
155 |
str,
|
156 |
str
|
157 |
]:
|
158 |
+
# Append user message
|
159 |
+
history.append([message, None])
|
160 |
+
bot_message = ""
|
161 |
+
new_repo_id = repo_id_state
|
162 |
+
new_workflow = workflow_state
|
163 |
+
updated_preview = preview_html
|
164 |
+
updated_container = container_logs
|
165 |
+
updated_build = build_logs
|
166 |
+
|
167 |
+
# -- same workflow logic as before, but use call_gemini(prompt, gemini_api_key, gemini_model) --
|
168 |
+
|
169 |
+
# example when generating code:
|
170 |
+
# resp = call_gemini(prompt, gemini_api_key, gemini_model)
|
171 |
+
|
172 |
+
# [Omitted for brevity; insert your existing logic here, replacing calls to
|
173 |
+
# call_gemini(prompt, gemini_api_key) with call_gemini(prompt, gemini_api_key, gemini_model).]
|
174 |
+
|
175 |
+
return history, new_repo_id, new_workflow, updated_preview, updated_container, updated_build
|
176 |
+
|
177 |
|
178 |
# --- Build the Gradio UI ---
|
179 |
|
180 |
with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
|
181 |
+
hf_profile = gr.State(None)
|
182 |
+
hf_token = gr.State(None)
|
183 |
+
gemini_key = gr.State(None)
|
184 |
+
gemini_model = gr.State("gemini-2.5-pro-preview-03-25")
|
185 |
+
repo_id = gr.State(None)
|
186 |
+
workflow = gr.State("idle")
|
187 |
+
sdk_state = gr.State("gradio")
|
188 |
|
189 |
with gr.Row():
|
190 |
# Sidebar
|
|
|
193 |
login_status = gr.Markdown("*Not logged in.*")
|
194 |
login_btn = gr.LoginButton(variant="huggingface")
|
195 |
|
196 |
+
# init & update login status
|
197 |
ai_builder_tab.load(show_profile, outputs=login_status)
|
198 |
login_btn.click(show_profile, outputs=login_status)
|
199 |
+
login_btn.click(lambda profile, token: (profile, token),
|
200 |
+
outputs=[hf_profile, hf_token])
|
|
|
|
|
|
|
201 |
|
202 |
gr.Markdown("## Google AI Studio API Key")
|
203 |
gemini_input = gr.Textbox(label="API Key", type="password")
|
204 |
gemini_status = gr.Markdown("")
|
205 |
+
gemini_input.change(lambda k: k, inputs=gemini_input, outputs=gemini_key)
|
206 |
+
|
207 |
+
gr.Markdown("## Gemini Model")
|
208 |
+
model_selector = gr.Radio(
|
209 |
+
choices=[
|
210 |
+
("Gemini 2.5 Flash Preview 04-17", "gemini-2.5-flash-preview-04-17"),
|
211 |
+
("Gemini 2.5 Pro Preview 03-25", "gemini-2.5-pro-preview-03-25")
|
212 |
+
],
|
213 |
+
value="gemini-2.5-pro-preview-03-25",
|
214 |
+
label="Select model"
|
215 |
+
)
|
216 |
+
model_selector.change(lambda m: m, inputs=model_selector, outputs=gemini_model)
|
217 |
+
|
218 |
+
# configure Gemini whenever key or model changes
|
219 |
+
gr.Row().load(
|
220 |
+
configure_gemini,
|
221 |
+
inputs=[gemini_key, gemini_model],
|
222 |
+
outputs=[gemini_status]
|
223 |
+
)
|
224 |
+
gemini_input.change(
|
225 |
+
configure_gemini,
|
226 |
+
inputs=[gemini_key, gemini_model],
|
227 |
+
outputs=[gemini_status]
|
228 |
+
)
|
229 |
+
model_selector.change(
|
230 |
+
configure_gemini,
|
231 |
+
inputs=[gemini_key, gemini_model],
|
232 |
+
outputs=[gemini_status]
|
233 |
+
)
|
234 |
|
235 |
gr.Markdown("## Space SDK")
|
236 |
+
sdk_selector = gr.Radio(choices=["gradio","streamlit"], value="gradio", label="Template SDK")
|
237 |
sdk_selector.change(lambda s: s, inputs=sdk_selector, outputs=sdk_state)
|
238 |
|
239 |
# Main content
|
|
|
242 |
user_input = gr.Textbox(placeholder="Type your message…")
|
243 |
send_btn = gr.Button("Send", interactive=False)
|
244 |
|
245 |
+
# enable send only when logged in & key & model selected
|
246 |
ai_builder_tab.load(
|
247 |
+
lambda p, k, m: gr.update(interactive=bool(p and k and m)),
|
248 |
+
inputs=[hf_profile, gemini_key, gemini_model],
|
249 |
outputs=[send_btn]
|
250 |
)
|
251 |
login_btn.click(
|
252 |
+
lambda p, k, m: gr.update(interactive=bool(p and k and m)),
|
253 |
+
inputs=[hf_profile, gemini_key, gemini_model],
|
254 |
outputs=[send_btn]
|
255 |
)
|
256 |
gemini_input.change(
|
257 |
+
lambda p, k, m: gr.update(interactive=bool(p and k and m)),
|
258 |
+
inputs=[hf_profile, gemini_key, gemini_model],
|
259 |
+
outputs=[send_btn]
|
260 |
+
)
|
261 |
+
model_selector.change(
|
262 |
+
lambda p, k, m: gr.update(interactive=bool(p and k and m)),
|
263 |
+
inputs=[hf_profile, gemini_key, gemini_model],
|
264 |
outputs=[send_btn]
|
265 |
)
|
266 |
|
|
|
268 |
build_txt = gr.Textbox(label="Build Logs", lines=10, interactive=False)
|
269 |
run_txt = gr.Textbox(label="Container Logs", lines=10, interactive=False)
|
270 |
|
271 |
+
def wrap_chat(msg, history, prof, tok, key, model, rid, wf, sdk, prev, run_l, build_l):
|
272 |
+
out = ai_workflow_chat(
|
273 |
+
msg, history, prof, tok, key, model, rid, wf, sdk, prev, run_l, build_l
|
274 |
)
|
275 |
+
hist, new_rid, new_wf, new_prev, new_run, new_build = out
|
276 |
+
return [(u or "", v or "") for u, v in hist], new_rid, new_wf, new_prev, new_run, new_build
|
277 |
|
278 |
send_btn.click(
|
279 |
wrap_chat,
|
280 |
inputs=[
|
281 |
user_input, chatbot,
|
282 |
+
hf_profile, hf_token,
|
283 |
+
gemini_key, gemini_model,
|
284 |
+
repo_id, workflow, sdk_state,
|
285 |
+
iframe, run_txt, build_txt
|
286 |
],
|
287 |
outputs=[
|
288 |
chatbot,
|
289 |
+
repo_id, workflow,
|
290 |
+
iframe, run_txt, build_txt
|
291 |
]
|
292 |
)
|
293 |
|
294 |
with gr.Blocks(title="Manual Hugging Face Space Manager") as manual_control_tab:
|
295 |
+
# ... (manual tab unchanged) ...
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
296 |
|
297 |
demo = gr.TabbedInterface(
|
298 |
[ai_builder_tab, manual_control_tab],
|