wuhp committed on
Commit
b874ac3
·
verified ·
1 Parent(s): 18ec03a

Create backup.py

Browse files
Files changed (1) hide show
  1. backup.py +211 -0
backup.py ADDED
@@ -0,0 +1,211 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import re
2
+ import json
3
+ import time
4
+ import importlib.metadata
5
+ import gradio as gr
6
+ from huggingface_hub import create_repo, upload_file, list_models, constants
7
+ from huggingface_hub.utils import build_hf_headers, get_session, hf_raise_for_status
8
+ from google import genai
9
+ from google.genai.types import Tool, GenerateContentConfig, GoogleSearch
10
+
11
+ # — USER INFO & MODEL LISTING —
12
+
13
def show_profile(profile: gr.OAuthProfile | None) -> str:
    """Render the current login status as a Markdown string."""
    # A missing OAuth profile means the visitor has not signed in yet.
    if profile is None:
        return "*Not logged in.*"
    return f"✅ Logged in as **{profile.username}**"
17
+
18
def list_private_models(
    profile: gr.OAuthProfile | None,
    oauth_token: gr.OAuthToken | None
) -> str:
    """Return a Markdown list of the user's Hub models, tagged public/private."""
    # Both the profile and a token are required to query the Hub on the
    # user's behalf.
    if profile is None or oauth_token is None:
        return "Please log in to see your models."
    entries = []
    for model in list_models(author=profile.username, token=oauth_token.token):
        visibility = "private" if model.private else "public"
        entries.append(f"{model.id} ({visibility})")
    if not entries:
        return "No models found."
    return "Models:\n\n" + "\n - ".join(entries)
29
+
30
+ # — UTILITIES —
31
+
32
def get_sdk_version(sdk_choice: str) -> str:
    """Return the installed version of the chosen SDK package.

    Any value other than "gradio" is treated as streamlit. Falls back to
    "UNKNOWN" when the package is not installed in this environment.
    """
    package_name = "gradio" if sdk_choice == "gradio" else "streamlit"
    try:
        version = importlib.metadata.version(package_name)
    except importlib.metadata.PackageNotFoundError:
        version = "UNKNOWN"
    return version
38
+
39
def extract_code(text: str) -> str:
    """Return the contents of the last fenced code block in *text*.

    An optional language tag right after the opening fence is discarded.
    If no fenced block is present, the whole text is returned stripped.
    """
    matches = re.findall(r"```(?:\w*\n)?([\s\S]*?)```", text)
    if matches:
        return matches[-1].strip()
    return text.strip()
42
+
43
+ # — HF SPACE LOGGING —
44
+
45
def _get_space_jwt(repo_id: str):
    """Fetch a short-lived JWT authorizing access to the Space's log streams."""
    jwt_url = f"{constants.ENDPOINT}/api/spaces/{repo_id}/jwt"
    response = get_session().get(jwt_url, headers=build_hf_headers())
    # Raise a descriptive HfHubHTTPError on any non-2xx response.
    hf_raise_for_status(response)
    return response.json()["token"]
50
+
51
def fetch_logs(repo_id: str, level: str) -> str:
    """Stream a Space's log feed and collect it into one string.

    Parameters
    ----------
    repo_id : str
        Full Space id, e.g. "user/space-name".
    level : str
        Which stream to read — the callers use "build" or "run".

    Returns
    -------
    str
        One "[timestamp] message" line per server-sent event.
    """
    jwt = _get_space_jwt(repo_id)
    logs_url = f"https://api.hf.space/v1/{repo_id}/logs/{level}"
    prefix = b"data: "
    lines = []
    with get_session().get(logs_url, headers=build_hf_headers(token=jwt), stream=True) as resp:
        hf_raise_for_status(resp)
        for raw in resp.iter_lines():
            # Server-sent events: only "data: ..." payload lines carry records.
            if not raw.startswith(prefix):
                continue
            try:
                ev = json.loads(raw[len(prefix):].decode())
            except (ValueError, UnicodeDecodeError):
                # Skip malformed/partial events. The original bare `except:`
                # would also have swallowed KeyboardInterrupt and SystemExit.
                continue
            ts = ev.get("timestamp","")
            txt = ev.get("data","")
            lines.append(f"[{ts}] {txt}")
    return "\n".join(lines)
67
+
68
+ # — CORE LOOP —
69
+
70
def handle_user_message(
    history,                      # chat history: list of {"role": ..., "content": ...} dicts
    sdk_choice: str,              # "gradio" or "streamlit"
    gemini_api_key: str,
    grounding_enabled: bool,
    profile: gr.OAuthProfile | None,
    oauth_token: gr.OAuthToken | None
):
    """Generate a Space app with Gemini, deploy it, and iterate on log errors.

    Runs up to 5 generate → deploy → check-logs rounds: asks Gemini for app
    code, writes the app file, README and requirements locally, pushes them
    to the user's "<username>-auto-space" Space, then reads the build/run
    logs. If the logs contain "ERROR", the logs are fed back to the model
    as a new user turn and the loop retries; otherwise it stops early.

    Returns a 4-tuple: (messages without the system prompt, build logs,
    run logs, iframe HTML previewing the Space). When not logged in it
    returns the original history plus a warning message and placeholders.
    """
    if profile is None or oauth_token is None:
        return history + [{"role":"assistant","content":"⚠️ Please log in first."}], "", "", "<p>No Space yet.</p>"

    client = genai.Client(api_key=gemini_api_key)
    # System prompt steering the model toward Space code generation.
    system_msg = {
        "role":"system",
        "content":(
            f"You are an AI assistant writing a HuggingFace Space using the "
            f"{sdk_choice} SDK. After producing code, wait for logs; if errors appear, fix them."
        )
    }
    chat = [system_msg] + history

    # File names expected by the Spaces runtime for each SDK.
    code_fn = "app.py" if sdk_choice=="gradio" else "streamlit_app.py"
    readme_fn = "README.md"
    reqs_fn = "requirements.txt"
    # One fixed per-user Space; repeated runs overwrite it.
    repo_id = f"{profile.username}/{profile.username}-auto-space"

    build_logs = run_logs = ""
    for _ in range(5):
        # Optionally enable Google Search grounding for this generation.
        tools = [Tool(google_search=GoogleSearch())] if grounding_enabled else []
        cfg = GenerateContentConfig(tools=tools, response_modalities=["TEXT"])

        # NOTE(review): only the message texts are sent — role information is
        # dropped here, so the model sees a flat list of strings. Confirm this
        # is intended for the generate_content API.
        resp = client.models.generate_content(
            model="gemini-2.5-flash-preview-04-17",
            contents=[m["content"] for m in chat],
            config=cfg
        )

        raw = resp.text
        # Keep only the last fenced code block of the reply.
        code = extract_code(raw)
        chat.append({"role":"assistant","content":code})

        # write code
        with open(code_fn, "w") as f:
            f.write(code)

        # write dynamic README (Spaces front-matter; sdk_version resolved
        # from the locally installed package)
        sdk_version = get_sdk_version(sdk_choice)
        readme = f"""---
title: Wuhp Auto Space
emoji: 🐢
colorFrom: red
colorTo: pink
sdk: {sdk_choice}
sdk_version: {sdk_version}
app_file: {code_fn}
pinned: false
---

Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
"""
        with open(readme_fn, "w") as f:
            f.write(readme)

        # write requirements (pandas always, plus the chosen SDK)
        base_reqs = "pandas\n"
        extra = "streamlit\n" if sdk_choice=="streamlit" else "gradio\n"
        with open(reqs_fn, "w") as f:
            f.write(base_reqs + extra)

        # push to HF — idempotent create, then upload all three files
        create_repo(repo_id=repo_id, token=oauth_token.token,
                    exist_ok=True, repo_type="space", space_sdk=sdk_choice)
        for fn in (code_fn, readme_fn, reqs_fn):
            upload_file(path_or_fileobj=fn, path_in_repo=fn,
                        repo_id=repo_id, token=oauth_token.token,
                        repo_type="space")

        # NOTE(review): logs are fetched immediately after upload — the Space
        # build may not have started/finished yet; verify whether a wait or
        # polling loop is needed here.
        build_logs = fetch_logs(repo_id, "build")
        run_logs = fetch_logs(repo_id, "run")
        # Naive success check: stop as soon as neither log mentions ERROR.
        if "ERROR" not in build_logs.upper() and "ERROR" not in run_logs.upper():
            break

        # Feed the failing logs back to the model as the next user turn.
        chat.append({
            "role":"user",
            "content":(
                f"Build logs:\n{build_logs}\n\n"
                f"Run logs:\n{run_logs}\n\n"
                "Please fix the code."
            )
        })
        time.sleep(2)

    # Strip the system prompt before handing the transcript back to the UI.
    messages = [{"role":m["role"],"content":m["content"]} for m in chat if m["role"]!="system"]
    iframe = f'<iframe src="https://huggingface.co/spaces/{repo_id}" width="100%" height="500px"></iframe>'
    return messages, build_logs, run_logs, iframe
165
+
166
+ # — BUILD THE UI —
167
+
168
# UI assembly: login/model listing, generation controls, chat, log panes,
# Space preview, and a manual log-refresh button.
with gr.Blocks(title="HF Space Auto‑Builder") as demo:
    gr.Markdown("## Sign in + Auto‑Build Spaces\n\n1. Sign in 2. Enter your prompt 3. Watch code, README, requirements, logs, and preview\n\n---")

    # LOGIN & MODEL LISTING — Gradio auto-injects the OAuth profile/token
    # into callbacks whose signatures annotate gr.OAuthProfile / gr.OAuthToken.
    login_btn = gr.LoginButton(variant="huggingface", size="lg")
    status_md = gr.Markdown("*Not logged in.*")
    models_md = gr.Markdown()
    demo.load(show_profile, inputs=None, outputs=status_md)
    demo.load(list_private_models, inputs=None, outputs=models_md)
    login_btn.click(show_profile, inputs=None, outputs=status_md)
    login_btn.click(list_private_models, inputs=None, outputs=models_md)

    # CONTROLS
    sdk_choice = gr.Radio(["gradio","streamlit"], value="gradio", label="SDK template")
    api_key = gr.Textbox(label="Gemini API Key", type="password")
    grounding = gr.Checkbox(label="Enable grounding", value=False)

    # CHAT + OUTPUTS
    chatbot = gr.Chatbot(type="messages")
    user_in = gr.Textbox(placeholder="Your prompt…", label="Prompt")
    send_btn = gr.Button("Send")

    build_box = gr.Textbox(label="Build logs", lines=5, interactive=False)
    run_box = gr.Textbox(label="Run logs", lines=5, interactive=False)
    preview = gr.HTML("<p>No Space yet.</p>")

    # BUG FIX: the original wired send_btn straight to handle_user_message
    # without `user_in`, so the typed prompt never reached the model. This
    # wrapper appends the prompt to the chat history first, and clears the
    # prompt box when the round completes.
    def _send(
        history,
        prompt: str,
        sdk: str,
        key: str,
        ground: bool,
        profile: gr.OAuthProfile | None,
        oauth_token: gr.OAuthToken | None,
    ):
        history = (history or []) + [{"role": "user", "content": prompt}]
        msgs, build, run, frame = handle_user_message(
            history, sdk, key, ground, profile, oauth_token
        )
        return msgs, "", build, run, frame

    send_btn.click(
        fn=_send,
        inputs=[chatbot, user_in, sdk_choice, api_key, grounding],
        outputs=[chatbot, user_in, build_box, run_box, preview]
    )

    # — Refresh Logs button —
    def _refresh(profile: gr.OAuthProfile | None, oauth_token: gr.OAuthToken | None):
        """Re-fetch build/run logs for the user's auto-space ('' when logged out)."""
        if not profile or not oauth_token:
            return "", ""
        repo = f"{profile.username}/{profile.username}-auto-space"
        return fetch_logs(repo, "build"), fetch_logs(repo, "run")

    refresh_btn = gr.Button("Refresh Logs")
    # Gradio will auto‑inject `profile` and `oauth_token` here.
    refresh_btn.click(_refresh, inputs=None, outputs=[build_box, run_box])

demo.launch(server_name="0.0.0.0", server_port=7860)