Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -1,228 +1,470 @@
|
|
1 |
-
|
2 |
-
|
|
|
|
|
3 |
import json
|
4 |
-
|
5 |
-
|
6 |
-
|
7 |
-
|
8 |
-
|
9 |
-
|
10 |
from huggingface_hub.utils import build_hf_headers, get_session, hf_raise_for_status
|
11 |
|
12 |
-
#
|
13 |
|
14 |
def show_profile(profile: gr.OAuthProfile | None) -> str:
|
15 |
if profile is None:
|
16 |
return "*Not logged in.*"
|
17 |
return f"✅ Logged in as **{profile.username}**"
|
18 |
|
19 |
-
def list_private_models(
|
20 |
-
profile
|
21 |
-
oauth_token: gr.OAuthToken | None
|
22 |
-
) -> str:
|
23 |
-
if profile is None or oauth_token is None:
|
24 |
return "Please log in to see your models."
|
25 |
-
|
26 |
-
|
27 |
-
|
28 |
-
|
29 |
-
|
30 |
-
|
31 |
-
|
32 |
-
|
33 |
-
|
34 |
-
profile: gr.OAuthProfile | None,
|
35 |
-
oauth_token: gr.OAuthToken | None
|
36 |
-
):
|
37 |
-
return gr.update(interactive=profile is not None)
|
38 |
|
39 |
-
def
|
40 |
-
repo_id: str,
|
41 |
-
profile: gr.OAuthProfile | None,
|
42 |
-
oauth_token: gr.OAuthToken | None
|
43 |
-
):
|
44 |
-
return gr.update(interactive=bool(repo_id and profile and oauth_token))
|
45 |
-
|
46 |
-
# — CORE ACTIONS —
|
47 |
-
|
48 |
-
def create_space(
|
49 |
-
repo_name: str,
|
50 |
-
sdk: str,
|
51 |
-
profile: gr.OAuthProfile | None,
|
52 |
-
oauth_token: gr.OAuthToken | None
|
53 |
-
) -> tuple[str, str, str]:
|
54 |
-
if not profile or not oauth_token:
|
55 |
-
return "", "⚠️ Please log in first.", "<p>No Space created yet.</p>"
|
56 |
repo_id = f"{profile.username}/{repo_name}"
|
57 |
create_repo(
|
58 |
repo_id=repo_id,
|
59 |
-
token=
|
60 |
exist_ok=True,
|
61 |
repo_type="space",
|
62 |
space_sdk=sdk
|
63 |
)
|
64 |
-
url
|
65 |
-
logmsg = f"✅ Space ready: {url} (SDK: {sdk})"
|
66 |
iframe = f'<iframe src="{url}" width="100%" height="500px"></iframe>'
|
67 |
-
return repo_id,
|
68 |
|
69 |
-
def
|
70 |
-
|
71 |
path_in_repo: str,
|
72 |
repo_id: str,
|
73 |
-
profile: gr.OAuthProfile
|
74 |
-
|
75 |
) -> str:
|
76 |
-
if not profile
|
77 |
-
return "⚠️ Please log in first."
|
78 |
-
|
79 |
-
|
80 |
-
|
81 |
-
|
82 |
-
|
83 |
-
|
84 |
-
|
85 |
-
|
86 |
-
|
87 |
-
|
88 |
-
|
89 |
-
return f"✅ Uploaded `{path_in_repo}` to `{repo_id}`"
|
90 |
|
91 |
-
def _fetch_space_logs_level(repo_id: str, level: str) -> str:
|
92 |
-
|
93 |
-
jwt_url =
|
94 |
-
r = get_session().get(jwt_url, headers=build_hf_headers())
|
95 |
hf_raise_for_status(r)
|
96 |
-
jwt
|
97 |
-
# 2) Stream logs
|
98 |
logs_url = f"https://api.hf.space/v1/{repo_id}/logs/{level}"
|
99 |
-
lines = []
|
100 |
-
with get_session().get(logs_url, headers=build_hf_headers(token=jwt), stream=True) as resp:
|
101 |
hf_raise_for_status(resp)
|
102 |
for raw in resp.iter_lines():
|
|
|
|
|
|
|
103 |
if not raw.startswith(b"data: "):
|
104 |
continue
|
105 |
payload = raw[len(b"data: "):]
|
106 |
try:
|
107 |
event = json.loads(payload.decode())
|
|
|
|
|
|
|
|
|
|
|
108 |
except json.JSONDecodeError:
|
109 |
continue
|
110 |
-
|
111 |
-
txt = event.get("data", "")
|
112 |
-
lines.append(f"[{ts}] {txt}")
|
113 |
-
return "\n".join(lines)
|
114 |
|
115 |
-
def
|
116 |
-
repo_id:
|
117 |
-
profile: gr.OAuthProfile | None,
|
118 |
-
oauth_token: gr.OAuthToken | None
|
119 |
-
) -> str:
|
120 |
-
if not (profile and oauth_token and repo_id):
|
121 |
return "⚠️ Please log in and create a Space first."
|
122 |
-
return _fetch_space_logs_level(repo_id, "build")
|
123 |
|
124 |
-
def
|
125 |
-
repo_id:
|
126 |
-
profile: gr.OAuthProfile | None,
|
127 |
-
oauth_token: gr.OAuthToken | None
|
128 |
-
) -> str:
|
129 |
-
if not (profile and oauth_token and repo_id):
|
130 |
return "⚠️ Please log in and create a Space first."
|
131 |
-
return _fetch_space_logs_level(repo_id, "run")
|
132 |
-
|
133 |
-
#
|
134 |
-
|
135 |
-
|
136 |
-
|
137 |
-
"
|
138 |
-
|
139 |
-
|
140 |
-
"
|
141 |
-
"
|
142 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
143 |
)
|
144 |
|
145 |
-
|
146 |
-
|
147 |
-
status_md = gr.Markdown("*Not logged in.*")
|
148 |
-
models_md = gr.Markdown()
|
149 |
-
demo.load(show_profile, inputs=None, outputs=status_md)
|
150 |
-
login_btn.click(show_profile, inputs=None, outputs=status_md)
|
151 |
-
demo.load(list_private_models, inputs=None, outputs=models_md)
|
152 |
-
login_btn.click(list_private_models,
|
153 |
-
inputs=None, outputs=models_md)
|
154 |
-
|
155 |
-
# — CREATE SPACE —
|
156 |
-
repo_name = gr.Textbox(label="New Space name", placeholder="my-space")
|
157 |
-
sdk_selector = gr.Radio(
|
158 |
choices=["gradio","streamlit"],
|
159 |
value="gradio",
|
160 |
-
label="
|
161 |
)
|
162 |
-
|
163 |
-
|
164 |
-
|
165 |
-
|
166 |
-
|
167 |
-
|
168 |
-
|
169 |
-
|
170 |
-
create_btn.click(
|
171 |
-
fn=create_space,
|
172 |
-
inputs=[repo_name, sdk_selector],
|
173 |
-
outputs=[session_id, create_logs, preview]
|
174 |
)
|
175 |
|
176 |
-
|
177 |
-
|
178 |
-
|
179 |
-
|
180 |
-
|
181 |
-
|
182 |
-
|
183 |
-
inputs=[session_id],
|
184 |
-
outputs=[upload_btn])
|
185 |
-
login_btn.click(enable_repo_actions,
|
186 |
-
inputs=[session_id],
|
187 |
-
outputs=[upload_btn])
|
188 |
-
session_id.change(enable_repo_actions,
|
189 |
-
inputs=[session_id],
|
190 |
-
outputs=[upload_btn])
|
191 |
-
|
192 |
-
upload_btn.click(
|
193 |
-
fn=upload_file_to_space,
|
194 |
-
inputs=[file_uploader, path_in_repo, session_id],
|
195 |
-
outputs=[upload_logs]
|
196 |
)
|
197 |
|
198 |
-
#
|
199 |
-
|
200 |
-
|
201 |
-
|
202 |
-
|
203 |
-
|
204 |
-
|
205 |
-
|
206 |
-
|
207 |
-
|
208 |
-
outputs=[btn])
|
209 |
-
login_btn.click(enable_repo_actions,
|
210 |
-
inputs=[session_id],
|
211 |
-
outputs=[btn])
|
212 |
-
session_id.change(enable_repo_actions,
|
213 |
-
inputs=[session_id],
|
214 |
-
outputs=[btn])
|
215 |
-
|
216 |
-
build_logs_btn.click(
|
217 |
-
fn=get_build_logs,
|
218 |
-
inputs=[session_id],
|
219 |
-
outputs=[build_logs_md]
|
220 |
)
|
221 |
-
|
222 |
-
|
223 |
-
inputs=[
|
224 |
-
outputs=[
|
|
|
|
|
|
|
|
|
|
|
|
|
225 |
)
|
226 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
227 |
if __name__ == "__main__":
|
228 |
demo.launch()
|
|
|
1 |
+
# app.py
|
2 |
+
|
3 |
+
import os
|
4 |
+
import time
|
5 |
import json
|
6 |
+
import requests
|
7 |
+
|
8 |
+
import gradio as gr
|
9 |
+
import google.generativeai as genai
|
10 |
+
|
11 |
+
from huggingface_hub import create_repo, list_models, upload_file, constants
|
12 |
from huggingface_hub.utils import build_hf_headers, get_session, hf_raise_for_status
|
13 |
|
14 |
+
# --- Helper functions for Hugging Face integration ---
|
15 |
|
16 |
def show_profile(profile: gr.OAuthProfile | None) -> str:
    """Render the current login status as a Markdown line."""
    if profile is not None:
        return f"✅ Logged in as **{profile.username}**"
    return "*Not logged in.*"
20 |
|
21 |
+
def list_private_models(profile: gr.OAuthProfile | None) -> str:
    """Return a Markdown summary of the logged-in user's Hub models.

    Args:
        profile: OAuth profile supplied by the Gradio login button, or None
            when nobody is logged in.

    Returns:
        A Markdown string: a login prompt, an error message, or a bulleted
        list of ``owner/model (private|public)`` entries.
    """
    if profile is None:
        return "Please log in to see your models."
    try:
        # NOTE(review): relies on the private ``_token`` attribute of the
        # profile object — confirm against the gradio OAuth implementation.
        token_obj = profile._token
        models = [
            f"{m.id} ({'private' if m.private else 'public'})"
            for m in list_models(author=profile.username, token=token_obj.token if token_obj else None)
        ]
        if not models:
            return "No models found."
        # Prefix every entry (including the first) with "- " so the
        # Markdown list renders uniformly; the previous join left the first
        # model without a bullet and used an inconsistent " - " separator.
        return "Models:\n\n- " + "\n- ".join(models)
    except Exception as e:
        # Surface failures in the UI rather than crashing the app.
        return f"Error listing models: {e}"
|
|
|
|
|
|
|
|
33 |
|
34 |
+
def create_space_action(repo_name: str, sdk: str, profile: gr.OAuthProfile, token: gr.OAuthToken):
    """Create (or reuse) the Space ``<username>/<repo_name>``.

    Returns:
        A ``(repo_id, iframe_html)`` pair: the fully-qualified Space id and
        an HTML snippet embedding the Space page.
    """
    repo_id = "/".join((profile.username, repo_name))
    create_repo(
        repo_id=repo_id,
        token=token.token,
        exist_ok=True,  # re-creating an existing Space is not an error
        repo_type="space",
        space_sdk=sdk
    )
    space_url = f"https://huggingface.co/spaces/{repo_id}"
    embed = f'<iframe src="{space_url}" width="100%" height="500px"></iframe>'
    return repo_id, embed
46 |
|
47 |
+
def upload_file_to_space_action(
    file_obj,
    path_in_repo: str,
    repo_id: str,
    profile: gr.OAuthProfile,
    token: gr.OAuthToken
) -> str:
    """Upload *file_obj* to *path_in_repo* inside the Space *repo_id*.

    Returns:
        A human-readable status string suitable for display in the UI.
    """
    # All three prerequisites must be present before attempting an upload.
    if not (profile and token and repo_id):
        return "⚠️ Please log in and create a Space first."
    try:
        upload_file(
            path_or_fileobj=file_obj,
            path_in_repo=path_in_repo,
            repo_id=repo_id,
            token=token.token,
            repo_type="space"
        )
    except Exception as err:
        # Report the failure instead of letting it propagate into the UI.
        return f"Error uploading file: {err}"
    return f"✅ Uploaded `{path_in_repo}`"
|
|
67 |
|
68 |
+
def _fetch_space_logs_level(repo_id: str, level: str, token: str) -> str:
    """Fetch up to 200 log lines for a Space at the given *level*.

    Args:
        repo_id: Fully-qualified Space id (``user/space``).
        level: Log stream name — the callers pass ``"build"`` or ``"run"``.
        token: Hugging Face user access token used to mint a Space JWT.

    Returns:
        Newline-joined ``[timestamp] text`` lines, or a "No … logs found."
        placeholder when the stream yields nothing usable.
    """
    # 1) Exchange the user token for a short-lived Space JWT.
    jwt_url = f"{constants.ENDPOINT}/api/spaces/{repo_id}/jwt"
    r = get_session().get(jwt_url, headers=build_hf_headers(token=token))
    hf_raise_for_status(r)
    jwt = r.json()["token"]
    # 2) Stream the server-sent-events log endpoint using that JWT.
    logs_url = f"https://api.hf.space/v1/{repo_id}/logs/{level}"
    lines, count = [], 0
    with get_session().get(logs_url, headers=build_hf_headers(token=jwt), stream=True, timeout=20) as resp:
        hf_raise_for_status(resp)
        for raw in resp.iter_lines():
            # Cap the output so an endless stream cannot hang the UI.
            if count >= 200:
                lines.append("... truncated ...")
                break
            # SSE frames carry payloads on "data: " lines; skip everything else.
            if not raw.startswith(b"data: "):
                continue
            payload = raw[len(b"data: "):]
            try:
                event = json.loads(payload.decode())
                ts = event.get("timestamp", "")
                txt = event.get("data", "").strip()
                if txt:
                    lines.append(f"[{ts}] {txt}")
                    count += 1
            except json.JSONDecodeError:
                # Malformed frames are ignored rather than aborting the stream.
                continue
    return "\n".join(lines) if lines else f"No {level} logs found."
|
|
|
|
|
|
94 |
|
95 |
+
def get_build_logs_action(repo_id, profile, token):
    """Fetch build-stage logs for *repo_id*; requires login and a Space."""
    if repo_id and profile and token:
        return _fetch_space_logs_level(repo_id, "build", token.token)
    return "⚠️ Please log in and create a Space first."
99 |
|
100 |
+
def get_container_logs_action(repo_id, profile, token):
    """Fetch runtime (container) logs for *repo_id*; requires login and a Space."""
    if repo_id and profile and token:
        return _fetch_space_logs_level(repo_id, "run", token.token)
    return "⚠️ Please log in and create a Space first."
104 |
+
|
105 |
+
# --- Google Gemini integration ---
|
106 |
+
|
107 |
+
def configure_gemini(api_key: str | None) -> str:
|
108 |
+
if not api_key:
|
109 |
+
return "Gemini API key is not set."
|
110 |
+
try:
|
111 |
+
genai.configure(api_key=api_key)
|
112 |
+
genai.GenerativeModel("gemini-pro").generate_content("ping")
|
113 |
+
return "Gemini configured successfully."
|
114 |
+
except Exception as e:
|
115 |
+
return f"Error configuring Gemini: {e}. Please check your API key."
|
116 |
+
|
117 |
+
def call_gemini(prompt: str, api_key: str) -> str:
    """Send *prompt* to the gemini-pro model and return its text reply.

    Any failure (bad key, network, blocked response) is returned as an
    error string rather than raised.
    """
    if not api_key:
        return "Error: Gemini API key not provided."
    try:
        genai.configure(api_key=api_key)
        reply = genai.GenerativeModel("gemini-pro").generate_content(prompt)
        return reply.text or "Gemini returned an empty response."
    except Exception as err:
        return f"Error calling Gemini API: {err}"
127 |
+
|
128 |
+
# --- AI workflow logic ---
|
129 |
+
|
130 |
+
def ai_workflow_chat(
    message: str,
    history: list[list[str | None]],
    hf_profile: gr.OAuthProfile | None,
    hf_token: gr.OAuthToken | None,
    gemini_api_key: str | None,
    repo_id_state: str | None,
    workflow_state: str,
    space_sdk: str,
    preview_html: str,
    container_logs: str,
    build_logs: str
) -> tuple[
    list[list[str | None]],
    str | None,
    str,
    str,
    str,
    str
]:
    """Drive one turn of the chat-based Space-building state machine.

    The workflow state moves through: awaiting_login / awaiting_api_key →
    awaiting_repo_name → awaiting_app_description → generating_code →
    uploading_code → awaiting_log_check → reviewing_logs → debugging,
    with "reset" returning to "idle".

    Args:
        message: The user's chat message for this turn.
        history: Chat transcript as [user, bot] pairs; mutated in place.
        hf_profile / hf_token: Hugging Face OAuth credentials, or None.
        gemini_api_key: Google AI Studio key used for code generation.
        repo_id_state: Current Space id, or None before creation.
        workflow_state: Current state-machine label (see above).
        space_sdk: "gradio" or "streamlit" — template for the new Space.
        preview_html / container_logs / build_logs: Current UI panel contents.

    Returns:
        (history, repo_id, workflow, preview_html, container_logs, build_logs)
        — the updated versions of the corresponding inputs.
    """
    # Record the user turn; the bot slot is filled in at the end.
    history.append([message, None])
    bot_message = ""
    new_repo_id = repo_id_state
    new_workflow = workflow_state
    updated_preview = preview_html
    updated_container = container_logs
    updated_build = build_logs

    try:
        # Preliminary checks: credentials gate every other branch.
        if not hf_profile or not hf_token:
            bot_message = "Please log in to Hugging Face first."
            new_workflow = "awaiting_login"
        elif not gemini_api_key:
            bot_message = "Please enter your Google AI Studio API key."
            new_workflow = "awaiting_api_key"

        # Starting a new Space
        elif (new_workflow == "idle" or "create" in message.lower()) and not new_repo_id:
            bot_message = "What should the Space be called? (e.g., `my-awesome-app`)"
            new_workflow = "awaiting_repo_name"

        # User provides a repo name
        elif new_workflow == "awaiting_repo_name":
            repo_name = message.strip()
            if not repo_name:
                bot_message = "Please provide a valid Space name."
            else:
                bot_message = f"Creating Space `{hf_profile.username}/{repo_name}`..."
                new_repo_id, iframe_html = create_space_action(repo_name, space_sdk, hf_profile, hf_token)
                updated_preview = iframe_html
                bot_message += "\n✅ Space created."
                new_workflow = "awaiting_app_description"

        # User describes the app, or we are iterating on a fix (debugging):
        # both branches build a Gemini prompt, then share the generate/upload path.
        elif new_workflow in ("awaiting_app_description", "debugging"):
            if new_workflow == "awaiting_app_description":
                app_desc = message
                bot_message = f"Generating code for a `{space_sdk}` app based on: '{app_desc}'..."
                prompt = f"""
You are an AI assistant specializing in Hugging Face Spaces using the {space_sdk} SDK.
Generate a full, single-file Python app based on:
'{app_desc}'
Return **only** the code block (```python ...```).
"""
            else:  # debugging: feed current container logs back into the prompt
                debug_instr = message
                logs = get_container_logs_action(new_repo_id, hf_profile, hf_token)
                bot_message = f"Analyzing logs and applying fixes: '{debug_instr}'..."
                prompt = f"""
You are debugging a {space_sdk} Space.
Logs:
{logs}
User instructions:
'{debug_instr}'
Generate a fixed, single-file Python app. Return only the ```python``` code block.
"""
            new_workflow = "generating_code"
            resp = call_gemini(prompt, gemini_api_key)
            # Extract the fenced ```python``` block from the model reply.
            start = resp.find("```python")
            end = resp.rfind("```")
            if start != -1 and end != -1 and end > start:
                code = resp[start + len("```python"):end].strip()
                bot_message += "\n✅ Code generated. Uploading..."
                new_workflow = "uploading_code"
                upload_log = upload_file_to_space_action(code, "app.py", new_repo_id, hf_profile, hf_token)
                bot_message += "\n" + upload_log
                # upload_file_to_space_action signals success via this marker string.
                if "✅ Uploaded" in upload_log:
                    bot_message += "\nThe Space is now rebuilding. Say 'check logs' to fetch them."
                    new_workflow = "awaiting_log_check"
                    updated_preview = f'<iframe src="https://huggingface.co/spaces/{new_repo_id}" width="100%" height="500px"></iframe>'
                else:
                    new_workflow = "idle"
            else:
                bot_message += f"\n⚠️ Could not parse code from Gemini.\nResponse:\n{resp}"
                new_workflow = "awaiting_app_description"

        # Check logs on request
        elif new_workflow == "awaiting_log_check" and "check logs" in message.lower():
            bot_message = "Fetching container logs..."
            updated_container = get_container_logs_action(new_repo_id, hf_profile, hf_token)
            updated_build = get_build_logs_action(new_repo_id, hf_profile, hf_token)
            bot_message += "\n✅ Logs updated. Describe any errors or say 'generate fix'."
            new_workflow = "reviewing_logs"

        # Auto-generate a fix when the logs look broken
        elif new_workflow == "reviewing_logs" and "generate fix" in message.lower():
            latest = get_container_logs_action(new_repo_id, hf_profile, hf_token)
            # Crude heuristic: only proceed when the logs mention an error.
            if "Error" not in latest and "Exception" not in latest:
                bot_message = "No clear error found. What should I fix?"
                new_workflow = "reviewing_logs"
            else:
                bot_message = "Generating a fix based on logs..."
                new_workflow = "debugging"

        # Reset workflow to a clean slate
        elif "reset" in message.lower():
            bot_message = "Workflow reset."
            new_repo_id = None
            updated_preview = "<p>No Space created yet.</p>"
            updated_container = ""
            updated_build = ""
            new_workflow = "idle"

        else:
            bot_message = ("Command not recognized. You can ask to 'create', "
                           "'check logs', 'generate fix', or 'reset'.")
            new_workflow = workflow_state

    except Exception as e:
        # Any unexpected failure resets the machine rather than crashing the UI.
        bot_message = f"An unexpected error occurred: {e}"
        new_workflow = "idle"

    # Append bot response into the pending slot (or as a new pair if the
    # turn somehow already has a reply).
    if history and history[-1][1] is None:
        history[-1][1] = bot_message
    else:
        history.append([None, bot_message])

    return history, new_repo_id, new_workflow, updated_preview, updated_container, updated_build
271 |
+
|
272 |
+
# --- Build the Gradio UI ---
|
273 |
+
|
274 |
+
# --- Tab 1: chat-driven AI app builder -----------------------------------
with gr.Blocks(title="AI-Powered HF Space App Builder") as ai_builder_tab:
    # Per-session state shared across event handlers.
    hf_profile = gr.State(None)
    hf_token = gr.State(None)
    gemini_key = gr.State(None)
    repo_id = gr.State(None)
    workflow = gr.State("idle")
    sdk_state = gr.State("gradio")

    with gr.Row():
        # Sidebar: login, Gemini key, SDK choice.
        with gr.Column(scale=1, min_width=300):
            gr.Markdown("## Hugging Face Login")
            login_status = gr.Markdown("*Not logged in.*")
            login_btn = gr.LoginButton(variant="huggingface")

            # On logout: clear credentials, then refresh the status line.
            login_btn.logout(
                lambda: (None, None, "*Not logged in.*"),
                outputs=[hf_profile, hf_token, login_status]
            ).then(
                show_profile,
                inputs=[hf_profile],
                outputs=[login_status]
            ).then(
                # NOTE(review): a two-argument lambda wired to a single
                # input component — this relies on gradio injecting the
                # OAuth profile/token for LoginButton inputs; confirm this
                # actually receives (profile, token) at runtime.
                lambda profile, token: (profile, token),
                inputs=[login_btn],
                outputs=[hf_profile, hf_token]
            )

            gr.Markdown("## Google AI Studio API Key")
            gemini_input = gr.Textbox(label="API Key", type="password")
            gemini_status = gr.Markdown("")
            # Store the key in state, then validate it with a test call.
            gemini_input.change(
                lambda k: k,
                inputs=[gemini_input],
                outputs=[gemini_key]
            ).then(
                configure_gemini,
                inputs=[gemini_key],
                outputs=[gemini_status]
            )

            gr.Markdown("## Space SDK")
            sdk_selector = gr.Radio(
                choices=["gradio", "streamlit"],
                value="gradio",
                label="Template SDK"
            )
            sdk_selector.change(
                lambda s: s,
                inputs=[sdk_selector],
                outputs=[sdk_state]
            )

        # Main content: chat, Space preview, and log panels.
        with gr.Column(scale=3):
            chatbot = gr.Chatbot()
            user_input = gr.Textbox(placeholder="Type your message…")
            send_btn = gr.Button("Send")

            iframe = gr.HTML("<p>No Space created yet.</p>")
            build_txt = gr.Textbox(label="Build Logs", lines=10, interactive=False)
            run_txt = gr.Textbox(label="Container Logs", lines=10, interactive=False)

            # Adapter between Chatbot's tuple-based history and
            # ai_workflow_chat's list-of-lists history.
            def wrap_chat(msg, history, prof, tok, key, rid, wf, sdk, prev, run_l, build_l):
                hist = [[u, v] for u, v in history]
                new_hist, new_rid, new_wf, new_prev, new_run, new_build = ai_workflow_chat(
                    msg, hist, prof, tok, key, rid, wf, sdk, prev, run_l, build_l
                )
                # Chatbot expects (user, bot) string tuples; None → "".
                out_hist = [(u or "", v or "") for u, v in new_hist]
                return out_hist, new_rid, new_wf, new_prev, new_run, new_build

            send_btn.click(
                wrap_chat,
                inputs=[
                    user_input, chatbot,
                    hf_profile, hf_token, gemini_key,
                    repo_id, workflow, sdk_state,
                    iframe, run_txt, build_txt
                ],
                outputs=[
                    chatbot,
                    repo_id, workflow,
                    iframe, run_txt, build_txt
                ]
            )

# --- Tab 2: manual Space management --------------------------------------
with gr.Blocks(title="Manual Hugging Face Space Manager") as manual_control_tab:
    manual_profile = gr.State(None)
    manual_token = gr.State(None)
    manual_repo = gr.State(None)

    gr.Markdown("## Manual Sign-In & Space Management")
    manual_login_btn = gr.LoginButton(variant="huggingface", size="lg")
    manual_status = gr.Markdown("*Not logged in.*")
    manual_models = gr.Markdown()

    # On logout: clear everything, refresh status, then re-list models.
    manual_login_btn.logout(
        lambda: (None, None, "*Not logged in.*", ""),
        outputs=[manual_profile, manual_token, manual_status, manual_repo]
    ).then(
        show_profile,
        inputs=[manual_profile],
        outputs=[manual_status]
    ).then(
        # NOTE(review): same OAuth-injection assumption as the AI tab —
        # confirm gradio supplies (profile, token) here.
        lambda profile, token: (profile, token),
        inputs=[manual_login_btn],
        outputs=[manual_profile, manual_token]
    ).then(
        list_private_models,
        inputs=[manual_profile],
        outputs=[manual_models]
    )

    manual_repo_name = gr.Textbox(label="New Space name", placeholder="my-space")
    manual_sdk_sel = gr.Radio(
        choices=["gradio","streamlit"],
        value="gradio",
        label="Template SDK"
    )
    manual_create_btn = gr.Button("Create Space", interactive=False)
    manual_create_logs= gr.Textbox(label="Create Logs", lines=3, interactive=False)
    manual_preview = gr.HTML("<p>No Space created yet.</p>")

    # Enable the create button only once a profile is present.
    manual_login_btn.click(
        lambda prof: gr.update(interactive=prof is not None),
        inputs=[manual_profile],
        outputs=[manual_create_btn]
    )

    manual_create_btn.click(
        create_space_action,
        inputs=[manual_repo_name, manual_sdk_sel, manual_profile, manual_token],
        outputs=[manual_repo, manual_preview]
    ).then(
        # NOTE(review): this lambda declares one parameter but the chained
        # step passes no inputs — verify gradio tolerates the mismatch or
        # drop the parameter.
        lambda x: "",
        outputs=[manual_create_logs]
    )

    # File upload
    manual_path = gr.Textbox(label="Path in Space", value="app.py")
    manual_file = gr.File(label="Select file")
    manual_up_btn = gr.Button("Upload File", interactive=False)
    manual_up_log = gr.Textbox(label="Upload Logs", lines=2, interactive=False)

    # The upload button needs both a repo and a logged-in profile.
    manual_repo.change(
        lambda rid, prof: gr.update(interactive=bool(rid and prof)),
        inputs=[manual_repo, manual_profile],
        outputs=[manual_up_btn]
    )
    manual_login_btn.click(
        lambda rid, prof: gr.update(interactive=bool(rid and prof)),
        inputs=[manual_repo, manual_profile],
        outputs=[manual_up_btn]
    )

    manual_up_btn.click(
        upload_file_to_space_action,
        inputs=[manual_file, manual_path, manual_repo, manual_profile, manual_token],
        outputs=[manual_up_log]
    )

    # Logs
    manual_build_btn = gr.Button("Fetch Build Logs", interactive=False)
    manual_container_btn = gr.Button("Fetch Container Logs", interactive=False)
    manual_build_txt = gr.Textbox(label="Build Logs", lines=10, interactive=False)
    manual_container_txt = gr.Textbox(label="Container Logs", lines=10, interactive=False)

    # Same enable-gating for both log buttons. Note: `outputs=[btn]` is
    # evaluated immediately per iteration, so each button gets its own wiring.
    for btn in (manual_build_btn, manual_container_btn):
        manual_repo.change(
            lambda rid, prof: gr.update(interactive=bool(rid and prof)),
            inputs=[manual_repo, manual_profile],
            outputs=[btn]
        )
        manual_login_btn.click(
            lambda rid, prof: gr.update(interactive=bool(rid and prof)),
            inputs=[manual_repo, manual_profile],
            outputs=[btn]
        )

    manual_build_btn.click(
        get_build_logs_action,
        inputs=[manual_repo, manual_profile, manual_token],
        outputs=[manual_build_txt]
    )
    manual_container_btn.click(
        get_container_logs_action,
        inputs=[manual_repo, manual_profile, manual_token],
        outputs=[manual_container_txt]
    )

# Combine both tabs into the final app.
demo = gr.TabbedInterface(
    [ai_builder_tab, manual_control_tab],
    ["AI App Builder", "Manual Control"]
)

if __name__ == "__main__":
    demo.launch()