Update app.py
app.py  CHANGED

@@ -52,7 +52,7 @@ def fetch_logs(repo_id: str, level: str):
             continue
     return "\n".join(lines)
 
-# — CORE LOOP: ASK LLM FOR
+# — CORE LOOP: ASK LLM FOR JSON, WRITE & DEPLOY —
 
 def handle_user_message(
     history,                  # list of {"role","content"} dicts
@@ -63,20 +63,18 @@ def handle_user_message(
     profile: gr.OAuthProfile | None,
     oauth_token: gr.OAuthToken | None
 ):
-    # Require login
     if not (profile and oauth_token):
         return history + [{"role":"assistant","content":"⚠️ Please log in first."}], "", "", "<p>No Space yet.</p>"
 
     client = genai.Client(api_key=gemini_api_key)
 
-    # System instructions
     system_msg = {
         "role":"system",
         "content":(
             "Return exactly one JSON object matching this schema:\n"
             " • repo_name (string)\n"
             " • files (object mapping filename→file-content)\n\n"
-            "Files must include
+            "Files must include:\n"
             " - A code file (default name: app.py unless you choose otherwise)\n"
             " - requirements.txt with dependencies\n"
             " - README.md with frontmatter (title, emoji, sdk, sdk_version, app_file)\n\n"
@@ -84,14 +82,12 @@ def handle_user_message(
         )
     }
 
-    # Build chat context
     chat = [system_msg] + history + [{"role":"user", "content":user_prompt}]
-
     repo_id = None
     build_logs = run_logs = ""
 
     for _ in range(5):
-        #
+        # detect SDK version
         if sdk_choice == "gradio":
             import gradio as _gr; sdk_version = _gr.__version__
         else:
@@ -102,20 +98,19 @@ def handle_user_message(
             tools=tools,
             response_modalities=["TEXT"],
             response_mime_type="application/json",
-            response_schema=RepoSpec
+            response_schema=RepoSpec  # <<< use the Pydantic model here
         )
 
-        # Call the LLM
         resp = client.models.generate_content(
             model="gemini-2.5-flash-preview-04-17",
             contents=[m["content"] for m in chat],
             config=cfg
         )
+        # validate & parse
         spec = RepoSpec.model_validate_json(resp.text)
         repo_name = spec.repo_name
         files = spec.files
 
-        # Prepare repo
         repo_id = f"{profile.username}/{repo_name}"
         create_repo(
             repo_id=repo_id,
@@ -125,9 +120,8 @@ def handle_user_message(
             space_sdk=sdk_choice
         )
 
-        #
+        # write & upload files
         for fn, content in files.items():
-            # Replace placeholder in README.md
             if fn.lower() == "readme.md":
                 content = content.replace("<SDK_VERSION>", sdk_version)
             with open(fn, "w") as f:
@@ -140,15 +134,11 @@ def handle_user_message(
             repo_type="space"
         )
 
-        # Fetch logs
         build_logs = fetch_logs(repo_id, "build")
         run_logs = fetch_logs(repo_id, "run")
-
-        # Stop if clean
         if "ERROR" not in build_logs.upper() and "ERROR" not in run_logs.upper():
             break
 
-        # Feed errors back
         chat.append({
             "role":"user",
             "content":(
@@ -159,7 +149,6 @@ def handle_user_message(
         })
         time.sleep(2)
 
-    # Prepare UI outputs
     messages = [{"role":m["role"], "content":m["content"]} for m in chat if m["role"]!="system"]
     iframe = f'<iframe src="https://huggingface.co/spaces/{repo_id}" width="100%" height="500px"></iframe>'
     return messages, build_logs, run_logs, iframe
@@ -169,9 +158,8 @@ def handle_user_message(
 with gr.Blocks(title="HF Space Auto‑Builder") as demo:
     gr.Markdown("## Sign in + Auto‑Build Spaces\n\n"
                 "1. Sign in 2. Prompt 3. Deploy & Debug\n\n"
-                "
+                "_LLM controls filenames, code, README, requirements, and loops until successful._\n\n---")
 
-    # Login
     login_btn = gr.LoginButton(variant="huggingface", size="lg")
     status_md = gr.Markdown("*Not logged in.*")
     models_md = gr.Markdown()
@@ -180,12 +168,10 @@ with gr.Blocks(title="HF Space Auto‑Builder") as demo:
     login_btn.click(show_profile, None, status_md)
     login_btn.click(list_private_models, None, models_md)
 
-    # Controls
     sdk_choice = gr.Radio(["gradio","streamlit"], "gradio", label="SDK")
     api_key = gr.Textbox(label="Gemini API Key", type="password")
     grounding = gr.Checkbox(label="Enable grounding")
 
-    # Chat + Outputs
     chatbot = gr.Chatbot(type="messages")
     user_in = gr.Textbox(label="Prompt", placeholder="e.g. Build a CSV inspector…")
     send_btn = gr.Button("Send")
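The hunks reference a RepoSpec Pydantic model (passed as response_schema and parsed with RepoSpec.model_validate_json), but its definition sits outside this diff. Below is a minimal sketch of how such a model plugs into the google-genai structured-output call, assuming Pydantic v2; the tools/response_modalities options shown in the diff are omitted, and the prompt string and API-key placeholder are illustrative only.

# Sketch only — RepoSpec is defined elsewhere in app.py; this is one plausible shape.
from pydantic import BaseModel
from google import genai
from google.genai import types

class RepoSpec(BaseModel):
    repo_name: str           # name of the Space repo to create
    files: dict[str, str]    # filename -> file content (app.py, requirements.txt, README.md, ...)

client = genai.Client(api_key="GEMINI_API_KEY")   # placeholder; the app reads this from a Textbox

cfg = types.GenerateContentConfig(
    response_mime_type="application/json",
    response_schema=RepoSpec,                     # Gemini constrains its output to this schema
)

resp = client.models.generate_content(
    model="gemini-2.5-flash-preview-04-17",
    contents=["Build a minimal Gradio app that echoes user input."],   # illustrative prompt
    config=cfg,
)

spec = RepoSpec.model_validate_json(resp.text)    # raises ValidationError if the JSON doesn't match
print(spec.repo_name, list(spec.files))

The loop in the diff then writes spec.files to disk, uploads them to the Space, and, if fetch_logs reports errors, appends the build/run logs to the chat and retries, up to five attempts.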