import re
import json
import time
import importlib.metadata

from huggingface_hub import create_repo, upload_file, list_models, constants
from huggingface_hub.utils import build_hf_headers, get_session, hf_raise_for_status
from google import genai
from google.genai.types import Tool, GenerateContentConfig, GoogleSearch
import gradio as gr
# — UTILITIES —

def get_sdk_version(sdk_choice: str) -> str:
    pkg = "gradio" if sdk_choice == "gradio" else "streamlit"
    try:
        return importlib.metadata.version(pkg)
    except importlib.metadata.PackageNotFoundError:
        return "UNKNOWN"


def extract_code(text: str) -> str:
    """
    Pull out the last ```…``` block, or fall back to the whole text.
    """
    blocks = re.findall(r"```(?:\w*\n)?([\s\S]*?)```", text)
    return blocks[-1].strip() if blocks else text.strip()
# — HF SPACE LOGGING —

def _get_space_jwt(repo_id: str):
    url = f"{constants.ENDPOINT}/api/spaces/{repo_id}/jwt"
    r = get_session().get(url, headers=build_hf_headers())
    hf_raise_for_status(r)
    return r.json()["token"]
def fetch_logs(repo_id: str, level: str) -> str:
    jwt = _get_space_jwt(repo_id)
    logs_url = f"https://api.hf.space/v1/{repo_id}/logs/{level}"
    lines = []
    with get_session().get(logs_url, headers=build_hf_headers(token=jwt), stream=True) as resp:
        hf_raise_for_status(resp)
        for raw in resp.iter_lines():
            if raw.startswith(b"data: "):
                try:
                    ev = json.loads(raw[len(b"data: "):].decode())
                    ts = ev.get("timestamp", "")
                    txt = ev.get("data", "")
                    lines.append(f"[{ts}] {txt}")
                except (json.JSONDecodeError, UnicodeDecodeError):
                    continue
    return "\n".join(lines)
# — CORE LOOP —

def handle_user_message(
    history,
    sdk_choice: str,
    gemini_api_key: str,
    grounding_enabled: bool,
    profile: gr.OAuthProfile | None,
    oauth_token: gr.OAuthToken | None
):
    if not profile or not oauth_token:
        return history + [{"role": "assistant", "content": "⚠️ Please log in first."}], "", "", "<p>No Space yet.</p>"

    client = genai.Client(api_key=gemini_api_key)
    system_msg = {
        "role": "system",
        "content": (
            f"You are an AI assistant writing a HuggingFace Space using the "
            f"{sdk_choice} SDK. After producing code, wait for logs; if errors appear, fix them."
        )
    }
    chat = [system_msg] + history

    code_fn = "app.py" if sdk_choice == "gradio" else "streamlit_app.py"
    readme_fn = "README.md"
    reqs_fn = "requirements.txt"
    repo_id = f"{profile.username}/{profile.username}-auto-space"

    build_logs = run_logs = ""
    for _ in range(5):
        tools = [Tool(google_search=GoogleSearch())] if grounding_enabled else []
        cfg = GenerateContentConfig(tools=tools, response_modalities=["TEXT"])
        resp = client.models.generate_content(
            model="gemini-2.5-flash-preview-04-17",
            contents=[m["content"] for m in chat],
            config=cfg
        )
        raw = resp.text
        code = extract_code(raw)
        chat.append({"role": "assistant", "content": code})

        # write code
        with open(code_fn, "w") as f:
            f.write(code)

        # write dynamic README
        sdk_version = get_sdk_version(sdk_choice)
        readme = f"""---
title: Wuhp Auto Space
emoji: 🐢
colorFrom: red
colorTo: pink
sdk: {sdk_choice}
sdk_version: {sdk_version}
app_file: {code_fn}
pinned: false
---
Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
"""
        with open(readme_fn, "w") as f:
            f.write(readme)

        # write requirements
        base_reqs = "pandas\n"
        extra = "streamlit\n" if sdk_choice == "streamlit" else "gradio\n"
        with open(reqs_fn, "w") as f:
            f.write(base_reqs + extra)

        # push to HF
        create_repo(repo_id=repo_id, token=oauth_token.token,
                    exist_ok=True, repo_type="space", space_sdk=sdk_choice)
        for fn in (code_fn, readme_fn, reqs_fn):
            upload_file(path_or_fileobj=fn, path_in_repo=fn,
                        repo_id=repo_id, token=oauth_token.token,
                        repo_type="space")

        build_logs = fetch_logs(repo_id, "build")
        run_logs = fetch_logs(repo_id, "run")
        if "ERROR" not in build_logs.upper() and "ERROR" not in run_logs.upper():
            break

        chat.append({
            "role": "user",
            "content": (
                f"Build logs:\n{build_logs}\n\n"
                f"Run logs:\n{run_logs}\n\n"
                "Please fix the code."
            )
        })
        time.sleep(2)

    messages = [{"role": m["role"], "content": m["content"]} for m in chat if m["role"] != "system"]
    iframe = f'<iframe src="https://huggingface.co/spaces/{repo_id}" width="100%" height="500px"></iframe>'
    return messages, build_logs, run_logs, iframe
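
# NOTE: the UI below wires up `show_profile` and `list_private_models`, which are not
# defined anywhere in this file. The two helpers that follow are minimal sketches of
# what they could look like, assuming the usual Gradio OAuth objects
# (gr.OAuthProfile / gr.OAuthToken) and `huggingface_hub.list_models`; adapt as needed.

def show_profile(profile: gr.OAuthProfile | None) -> str:
    # Render the current login state for the status Markdown box.
    if profile is None:
        return "*Not logged in.*"
    return f"✅ Logged in as **{profile.username}**"


def list_private_models(
    profile: gr.OAuthProfile | None,
    oauth_token: gr.OAuthToken | None
) -> str:
    # List the user's models (including private ones) so they can confirm the token works.
    if profile is None or oauth_token is None:
        return "Please log in to see your models."
    models = [
        f"{m.id} ({'private' if m.private else 'public'})"
        for m in list_models(author=profile.username, token=oauth_token.token)
    ]
    if not models:
        return "No models found."
    return "Models:\n\n" + "\n".join(f"- {name}" for name in models)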
# — BUILD THE UI —

with gr.Blocks(title="HF Space Auto‑Builder") as demo:
    gr.Markdown("## Sign in + Auto‑Build Spaces\n\n...")

    login_btn = gr.LoginButton(variant="huggingface", size="lg")
    status_md = gr.Markdown("*Not logged in.*")
    models_md = gr.Markdown()
    demo.load(show_profile, inputs=None, outputs=status_md)
    demo.load(list_private_models, inputs=None, outputs=models_md)
    login_btn.click(show_profile, inputs=None, outputs=status_md)
    login_btn.click(list_private_models, inputs=None, outputs=models_md)

    sdk_choice = gr.Radio(["gradio", "streamlit"], value="gradio", label="SDK template")
    api_key = gr.Textbox(label="Gemini API Key", type="password")
    grounding = gr.Checkbox(label="Enable grounding", value=False)
    chatbot = gr.Chatbot(type="messages")
    user_in = gr.Textbox(placeholder="Your prompt…", label="Prompt")
    send_btn = gr.Button("Send")
    build_box = gr.Textbox(label="Build logs", lines=5, interactive=False)
    run_box = gr.Textbox(label="Run logs", lines=5, interactive=False)
    preview = gr.HTML("<p>No Space yet.</p>")

    send_btn.click(
        fn=handle_user_message,
        inputs=[chatbot, sdk_choice, api_key, grounding],
        outputs=[chatbot, build_box, run_box, preview]
    )

    # — New “Refresh Logs” control for manual edits —
    refresh_btn = gr.Button("Refresh Logs")

    def _refresh(profile: gr.OAuthProfile | None, oauth_token: gr.OAuthToken | None):
        # The OAuth profile/token are injected by Gradio from the login session;
        # the original wiring passed the two Markdown components as inputs, which
        # would hand plain strings to this function instead of the OAuth objects.
        if profile is None or oauth_token is None:
            return "Please log in first.", "Please log in first."
        repo = f"{profile.username}/{profile.username}-auto-space"
        return fetch_logs(repo, "build"), fetch_logs(repo, "run")

    refresh_btn.click(_refresh, inputs=None, outputs=[build_box, run_box])

demo.launch(server_name="0.0.0.0", server_port=7860)