ameerazam08 committed
Commit d7ca99f · verified · 1 Parent(s): 3aa4a89

updated for value

Files changed (1)
  1. app.py +6 -6
app.py CHANGED
@@ -29,7 +29,7 @@ if not torch.cuda.is_available():
 
 MAX_SEED = np.iinfo(np.int32).max
 CACHE_EXAMPLES = torch.cuda.is_available() and os.getenv("CACHE_EXAMPLES", "0") == "1"
-MAX_IMAGE_SIZE = int(os.getenv("MAX_IMAGE_SIZE", "1024"))
+MAX_IMAGE_SIZE = int(os.getenv("MAX_IMAGE_SIZE", "2024"))
 USE_TORCH_COMPILE = os.getenv("USE_TORCH_COMPILE") == "1"
 ENABLE_CPU_OFFLOAD = os.getenv("ENABLE_CPU_OFFLOAD") == "1"
 ENABLE_REFINER = os.getenv("ENABLE_REFINER", "0")#
@@ -85,9 +85,9 @@ def generate(
     width: int = 1024,
     height: int = 1024,
     guidance_scale_base: float = 5.0,
-    guidance_scale_refiner: float = 5.0,
-    num_inference_steps_base: int = 25,
-    num_inference_steps_refiner: int = 25,
+    guidance_scale_refiner: float = 7.0,
+    num_inference_steps_base: int = 60,
+    num_inference_steps_refiner: int = 35,
     apply_refiner: bool = False,
     progress=gr.Progress(track_tqdm=True),
 ) -> PIL.Image.Image:
@@ -234,14 +234,14 @@ with gr.Blocks(css="footer{display:none !important}", theme=theme) as demo:
     minimum=1,
     maximum=20,
     step=0.1,
-    value=5.0,
+    value=7.5,
 )
 num_inference_steps_refiner = gr.Slider(
     label="Number of inference steps for refiner",
     minimum=10,
     maximum=100,
     step=1,
-    value=25,
+    value=30,
 )
 
 gr.Examples(
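
Note on the config pattern touched by this commit: the constants near the top of app.py are read from environment variables with string defaults, so the new 2024 cap can still be overridden at launch without editing the file. A minimal sketch of that pattern, using only the variable names visible in the diff above (the rest of app.py is omitted, and the launch command is illustrative):

import os

# Env-driven configuration as in app.py: each constant falls back to the
# default baked into this commit unless the variable is set in the environment.
MAX_IMAGE_SIZE = int(os.getenv("MAX_IMAGE_SIZE", "2024"))   # raised from 1024 in this commit
USE_TORCH_COMPILE = os.getenv("USE_TORCH_COMPILE") == "1"
ENABLE_CPU_OFFLOAD = os.getenv("ENABLE_CPU_OFFLOAD") == "1"

# Overriding at launch, e.g. to restore the previous cap:
#   MAX_IMAGE_SIZE=1024 python app.py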