import gradio as gr
import json, time
from huggingface_hub import create_repo, upload_file, constants
from huggingface_hub.utils import build_hf_headers, get_session, hf_raise_for_status
from google import genai  # Gemini Python SDK
from google.genai.types import Tool, GenerateContentConfig, GoogleSearch
# — HELPERS FOR HF SPACE LOGS —
def _get_space_jwt(repo_id: str):
    """Fetch a short-lived JWT that authorises reading this Space's logs."""
    url = f"{constants.ENDPOINT}/api/spaces/{repo_id}/jwt"
    r = get_session().get(url, headers=build_hf_headers())
    hf_raise_for_status(r)
    return r.json()["token"]
def fetch_logs(repo_id: str, level: str):
    """Stream the Space's build or run logs and return them as one string."""
    jwt = _get_space_jwt(repo_id)
    logs_url = f"https://api.hf.space/v1/{repo_id}/logs/{level}"
    lines = []
    with get_session().get(logs_url, headers=build_hf_headers(token=jwt), stream=True) as resp:
        hf_raise_for_status(resp)
        for raw in resp.iter_lines():
            # log events arrive as server-sent events: b"data: {...}"
            if raw.startswith(b"data: "):
                try:
                    ev = json.loads(raw[len(b"data: "):].decode())
                    ts = ev.get("timestamp", "")
                    txt = ev.get("data", "")
                    lines.append(f"[{ts}] {txt}")
                except (json.JSONDecodeError, UnicodeDecodeError):
                    continue
    return "\n".join(lines)
# — CORE LOOP: send prompt & (iteratively) deploy —
def handle_user_message(
    history,
    user_prompt,
    hf_profile, hf_token,
    sdk_choice: str,
    gemini_api_key, grounding_enabled
):
    # Ensure we actually have a profile & token
    if hf_profile is None or hf_token is None:
        return (
            history + [{"role": "assistant", "content": "⚠️ Please log in first."}],
            "", "", "<p>No Space yet.</p>"
        )
    genai_client = genai.Client(api_key=gemini_api_key)
    chat = [{
        "role": "system",
        "content": (
            f"You are an AI assistant that writes a HuggingFace Space using the "
            f"{sdk_choice} SDK. After producing code, wait for logs. "
            "If errors appear, fix them and return the full updated code."
        )
    }]
    # history comes from gr.Chatbot(type="messages"): a list of {"role", "content"} dicts
    for msg in history:
        chat.append({"role": msg["role"], "content": msg["content"]})
    chat.append({"role": "user", "content": user_prompt})

    filename = "app.py" if sdk_choice == "gradio" else "streamlit_app.py"
    build_logs = run_logs = ""
    for _ in range(5):
        # build tool list for grounding
        tools = []
        if grounding_enabled:
            tools.append(Tool(google_search=GoogleSearch()))
        config = GenerateContentConfig(tools=tools, response_modalities=["TEXT"])

        resp = genai_client.models.generate_content(
            model="gemini-2.5-flash-preview-04-17",
            contents=[c["content"] for c in chat],
            config=config
        )
        ai_code = resp.text
        chat.append({"role": "assistant", "content": ai_code})

        # write & deploy
        with open(filename, "w") as f:
            f.write(ai_code)

        repo_id = f"{hf_profile.username}/{hf_profile.username}-auto-space"
        create_repo(
            repo_id=repo_id,
            token=hf_token.token,
            exist_ok=True,
            repo_type="space",
            space_sdk=sdk_choice
        )
        upload_file(
            path_or_fileobj=filename,
            path_in_repo=filename,
            repo_id=repo_id,
            token=hf_token.token,
            repo_type="space"
        )

        build_logs = fetch_logs(repo_id, "build")
        run_logs = fetch_logs(repo_id, "run")
        if "ERROR" not in build_logs.upper() and "ERROR" not in run_logs.upper():
            break

        chat.append({
            "role": "user",
            "content": (
                f"Build logs:\n{build_logs}\n\n"
                f"Run logs:\n{run_logs}\n\n"
                "Please fix the code."
            )
        })
        time.sleep(2)
    new_history = [
        {"role": h["role"], "content": h["content"]}
        for h in chat if h["role"] != "system"
    ]
    iframe = f'<iframe src="https://huggingface.co/spaces/{repo_id}" width="100%" height="500px"></iframe>'
    return new_history, build_logs, run_logs, iframe
# — GRADIO UI —
with gr.Blocks(title="HF Space Auto‑Builder (Gradio & Streamlit)") as demo:
    with gr.Row():
        with gr.Column(scale=1):
            gr.Markdown("### Sidebar")
            # hidden state holders for OAuthProfile and OAuthToken
            profile_state = gr.State()
            token_state = gr.State()
            login_btn = gr.LoginButton("huggingface", size="sm")
            login_status = gr.Markdown("*Not logged in.*")
            # capture profile & token into state and update the status text;
            # Gradio injects OAuthProfile/OAuthToken into functions that declare
            # them as typed parameters, and running them on page load means they
            # fire again once the OAuth redirect completes.
            def capture_login(profile: gr.OAuthProfile | None,
                              oauth_token: gr.OAuthToken | None):
                return profile, oauth_token

            def show_login(profile: gr.OAuthProfile | None):
                return (f"Logged in as **{profile.username}**"
                        if profile else "*Not logged in.*")

            demo.load(capture_login, outputs=[profile_state, token_state])
            demo.load(show_login, outputs=login_status)
            sdk_choice = gr.Radio(
                ["gradio", "streamlit"], value="gradio", label="SDK Template"
            )
            api_key = gr.Textbox(label="Gemini API Key", type="password")
            grounding = gr.Checkbox(label="Enable grounding", value=False)

        with gr.Column(scale=3):
            chatbot = gr.Chatbot(type="messages")
            user_in = gr.Textbox(placeholder="Your prompt …", label="Prompt")
            send_btn = gr.Button("Send")
            build_box = gr.Textbox(label="Build logs", lines=5, interactive=False)
            run_box = gr.Textbox(label="Run logs", lines=5, interactive=False)
            preview = gr.HTML("<p>No Space yet.</p>")
    send_btn.click(
        fn=handle_user_message,
        inputs=[
            chatbot, user_in,
            profile_state, token_state,
            sdk_choice, api_key, grounding
        ],
        outputs=[chatbot, build_box, run_box, preview]
    )
if __name__ == "__main__":
    demo.launch()