wuhp committed on
Commit
3dc01aa
·
verified ·
1 Parent(s): 6e9c777

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +31 -16
app.py CHANGED
@@ -3,6 +3,7 @@ import json, time
3
  from huggingface_hub import create_repo, upload_file, constants
4
  from huggingface_hub.utils import build_hf_headers, get_session, hf_raise_for_status
5
  from google import genai # Gemini Python SDK
 
6
 
7
  # — HELPERS FOR HF SPACE LOGS —
8
 
@@ -22,7 +23,8 @@ def fetch_logs(repo_id: str, level: str):
22
  if raw.startswith(b"data: "):
23
  try:
24
  ev = json.loads(raw[len(b"data: "):].decode())
25
- ts = ev.get("timestamp",""); txt = ev.get("data","")
 
26
  lines.append(f"[{ts}] {txt}")
27
  except:
28
  continue
@@ -36,7 +38,7 @@ def handle_user_message(
36
  sdk_choice: str,
37
  gemini_api_key, grounding_enabled
38
  ):
39
- # prep Gemini client + chat
40
  genai_client = genai.Client(api_key=gemini_api_key)
41
  chat = [{
42
  "role":"system",
@@ -46,39 +48,51 @@ def handle_user_message(
46
  "If errors appear, fix them and return the full updated code."
47
  )
48
  }]
49
- # include past chat
50
  for role, msg in history:
51
  chat.append({"role": role, "content": msg})
52
 
53
  filename = "app.py" if sdk_choice=="gradio" else "streamlit_app.py"
54
 
55
- # iterate up to 5 times to resolve errors
56
  for _ in range(5):
 
 
 
 
 
 
 
 
 
 
 
 
57
  response = genai_client.models.generate_content(
58
  model="gemini-2.5-flash-preview-04-17",
59
  contents=[c["content"] for c in chat],
60
- config={
61
- "tools": [],
62
- "google_search": grounding_enabled
63
- }
64
  )
65
  ai_code = response.text
66
  chat.append({"role":"assistant", "content": ai_code})
67
 
68
- # write and deploy
69
  with open(filename, "w") as f:
70
  f.write(ai_code)
71
 
 
72
  repo_id = f"{hf_profile.username}/{hf_profile.username}-auto-space"
73
  create_repo(
74
- repo_id=repo_id, token=hf_token.token,
75
- exist_ok=True, repo_type="space",
 
 
76
  space_sdk=sdk_choice
77
  )
78
  upload_file(
79
  path_or_fileobj=filename,
80
  path_in_repo=filename,
81
- repo_id=repo_id, token=hf_token.token,
 
82
  repo_type="space"
83
  )
84
 
@@ -86,10 +100,11 @@ def handle_user_message(
86
  build_logs = fetch_logs(repo_id, "build")
87
  run_logs = fetch_logs(repo_id, "run")
88
 
 
89
  if "ERROR" not in build_logs.upper() and "ERROR" not in run_logs.upper():
90
  break
91
 
92
- # feed errors back
93
  chat.append({
94
  "role":"user",
95
  "content":(
@@ -100,7 +115,7 @@ def handle_user_message(
100
  })
101
  time.sleep(2)
102
 
103
- # prepare outputs
104
  new_history = [(h["role"], h["content"]) for h in chat if h["role"]!="system"]
105
  iframe = f'<iframe src="https://huggingface.co/spaces/{repo_id}" width="100%" height="500px"></iframe>'
106
  return new_history, build_logs, run_logs, iframe
@@ -121,10 +136,10 @@ with gr.Blocks(title="HF Space Auto‑Builder (Gradio & Streamlit)") as demo:
121
  api_key = gr.Textbox(label="Gemini API Key", type="password")
122
  grounding = gr.Checkbox(label="Enable grounding", value=False)
123
 
124
- demo.load(lambda p: f"Logged in as **{p.username}**" if p else "*Not logged in.*",
125
  inputs=None, outputs=login_status)
126
  login_btn.click(
127
- lambda p: f"Logged in as **{p.username}**" if p else "*Not logged in.*",
128
  inputs=None, outputs=login_status
129
  )
130
 
 
3
  from huggingface_hub import create_repo, upload_file, constants
4
  from huggingface_hub.utils import build_hf_headers, get_session, hf_raise_for_status
5
  from google import genai # Gemini Python SDK
6
+ from google.genai.types import Tool, GenerateContentConfig, GoogleSearch
7
 
8
  # — HELPERS FOR HF SPACE LOGS —
9
 
 
23
  if raw.startswith(b"data: "):
24
  try:
25
  ev = json.loads(raw[len(b"data: "):].decode())
26
+ ts = ev.get("timestamp","")
27
+ txt = ev.get("data","")
28
  lines.append(f"[{ts}] {txt}")
29
  except:
30
  continue
 
38
  sdk_choice: str,
39
  gemini_api_key, grounding_enabled
40
  ):
41
+ # Initialize Gemini client + prime the conversation
42
  genai_client = genai.Client(api_key=gemini_api_key)
43
  chat = [{
44
  "role":"system",
 
48
  "If errors appear, fix them and return the full updated code."
49
  )
50
  }]
 
51
  for role, msg in history:
52
  chat.append({"role": role, "content": msg})
53
 
54
  filename = "app.py" if sdk_choice=="gradio" else "streamlit_app.py"
55
 
56
+ build_logs = run_logs = ""
57
  for _ in range(5):
58
+ # --- build the tool list ---
59
+ tools = []
60
+ if grounding_enabled:
61
+ tools.append(Tool(google_search=GoogleSearch()))
62
+
63
+ # --- configure Gemini properly ---
64
+ config = GenerateContentConfig(
65
+ tools=tools,
66
+ response_modalities=["TEXT"],
67
+ )
68
+
69
+ # --- call Gemini ---
70
  response = genai_client.models.generate_content(
71
  model="gemini-2.5-flash-preview-04-17",
72
  contents=[c["content"] for c in chat],
73
+ config=config
 
 
 
74
  )
75
  ai_code = response.text
76
  chat.append({"role":"assistant", "content": ai_code})
77
 
78
+ # write out the code
79
  with open(filename, "w") as f:
80
  f.write(ai_code)
81
 
82
+ # create/update the Space
83
  repo_id = f"{hf_profile.username}/{hf_profile.username}-auto-space"
84
  create_repo(
85
+ repo_id=repo_id,
86
+ token=hf_token.token,
87
+ exist_ok=True,
88
+ repo_type="space",
89
  space_sdk=sdk_choice
90
  )
91
  upload_file(
92
  path_or_fileobj=filename,
93
  path_in_repo=filename,
94
+ repo_id=repo_id,
95
+ token=hf_token.token,
96
  repo_type="space"
97
  )
98
 
 
100
  build_logs = fetch_logs(repo_id, "build")
101
  run_logs = fetch_logs(repo_id, "run")
102
 
103
+ # stop if no "ERROR" in either log
104
  if "ERROR" not in build_logs.upper() and "ERROR" not in run_logs.upper():
105
  break
106
 
107
+ # else, feed logs back into Gemini to repair
108
  chat.append({
109
  "role":"user",
110
  "content":(
 
115
  })
116
  time.sleep(2)
117
 
118
+ # prepare the outputs
119
  new_history = [(h["role"], h["content"]) for h in chat if h["role"]!="system"]
120
  iframe = f'<iframe src="https://huggingface.co/spaces/{repo_id}" width="100%" height="500px"></iframe>'
121
  return new_history, build_logs, run_logs, iframe
 
136
  api_key = gr.Textbox(label="Gemini API Key", type="password")
137
  grounding = gr.Checkbox(label="Enable grounding", value=False)
138
 
139
+ demo.load(lambda *args: f"Logged in as **{args[0].username}**" if args and args[0] else "*Not logged in.*",
140
  inputs=None, outputs=login_status)
141
  login_btn.click(
142
+ lambda *args: f"Logged in as **{args[0].username}**" if args and args[0] else "*Not logged in.*",
143
  inputs=None, outputs=login_status
144
  )
145