cwhuh committed on
Commit
57fc112
·
1 Parent(s): 5e4548b

add : display refinement prompt

Browse files
Files changed (1) hide show
  1. app.py +9 -3
app.py CHANGED
@@ -12,6 +12,11 @@ from huggingface_hub import hf_hub_download
12
  from safetensors.torch import load_file
13
  import subprocess
14
 
 
 
 
 
 
15
  subprocess.run("rm -rf /data-nvme/zerogpu-offload/*", env={}, shell=True)
16
 
17
 
@@ -56,7 +61,7 @@ def infer(prompt, seed=42, randomize_seed=False, width=1024, height=1024, guidan
56
  output_type="pil",
57
  good_vae=good_vae,
58
  ):
59
- yield img, seed
60
 
61
  examples = [
62
  "기계공학과(로켓) 포닉스",
@@ -91,6 +96,7 @@ with gr.Blocks(css=css) as demo:
91
  run_button = gr.Button("Run", scale=0)
92
 
93
  result = gr.Image(label="Result", show_label=False)
 
94
 
95
  with gr.Accordion("Advanced Settings", open=False):
96
 
@@ -144,7 +150,7 @@ with gr.Blocks(css=css) as demo:
144
  examples = examples,
145
  fn = infer,
146
  inputs = [prompt],
147
- outputs = [result, seed],
148
  cache_examples="lazy"
149
  )
150
 
@@ -152,7 +158,7 @@ with gr.Blocks(css=css) as demo:
152
  triggers=[run_button.click, prompt.submit],
153
  fn = infer,
154
  inputs = [prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps],
155
- outputs = [result, seed]
156
  )
157
 
158
  demo.launch()
 
12
  from safetensors.torch import load_file
13
  import subprocess
14
 
15
+ from pydantic import BaseModel
16
+
17
+ class RefinedPrompt(BaseModel):
18
+ prompt:str
19
+
20
  subprocess.run("rm -rf /data-nvme/zerogpu-offload/*", env={}, shell=True)
21
 
22
 
 
61
  output_type="pil",
62
  good_vae=good_vae,
63
  ):
64
+ yield img, seed, refined_prompt
65
 
66
  examples = [
67
  "기계공학과(로켓) 포닉스",
 
96
  run_button = gr.Button("Run", scale=0)
97
 
98
  result = gr.Image(label="Result", show_label=False)
99
+ refined_prompt_display = gr.Text(label="정제된 프롬프트", show_label=True)
100
 
101
  with gr.Accordion("Advanced Settings", open=False):
102
 
 
150
  examples = examples,
151
  fn = infer,
152
  inputs = [prompt],
153
+ outputs = [result, seed, refined_prompt_display],
154
  cache_examples="lazy"
155
  )
156
 
 
158
  triggers=[run_button.click, prompt.submit],
159
  fn = infer,
160
  inputs = [prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps],
161
+ outputs = [result, seed, refined_prompt_display]
162
  )
163
 
164
  demo.launch()