DerekLiu35 committed
Commit 9e5447d · 1 Parent: b64eca0

add examples

Files changed (2):
  1. .gitattributes +4 -0
  2. app.py +71 -12
.gitattributes CHANGED
@@ -33,3 +33,7 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ examples/astronauts_bnb_8bit.png filter=lfs diff=lfs merge=lfs -text
+ examples/astronauts_seed_6456306350371904162.png filter=lfs diff=lfs merge=lfs -text
+ examples/watercolor_cat_bnb_8bit.png filter=lfs diff=lfs merge=lfs -text
+ examples/watercolor_cat_seed_14269059182221286790.png filter=lfs diff=lfs merge=lfs -text
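The four new patterns pin the pre-rendered comparison images under examples/ to Git LFS. As a hedged aside (not part of this commit), a quick local sanity check that the committed files open cleanly as images could look like the sketch below; the file names are taken from the entries above and the script is assumed to run from the repository root, nothing else is assumed:

```python
# Hypothetical pre-commit sanity check for the LFS-tracked example images.
from pathlib import Path
from PIL import Image

EXAMPLE_FILES = [
    "examples/astronauts_bnb_8bit.png",
    "examples/astronauts_seed_6456306350371904162.png",
    "examples/watercolor_cat_bnb_8bit.png",
    "examples/watercolor_cat_seed_14269059182221286790.png",
]

for name in EXAMPLE_FILES:
    path = Path(name)
    assert path.exists(), f"missing example image: {name}"
    with Image.open(path) as img:
        img.verify()                 # cheap integrity check, no full decode
    with Image.open(path) as img:    # reopen: verify() invalidates the handle
        print(f"{name}: {img.size[0]}x{img.size[1]}, mode={img.mode}")
```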
app.py CHANGED
@@ -3,6 +3,8 @@ import gradio as gr
  from diffusers import FluxPipeline, FluxTransformer2DModel
  import gc
  import random
+ import glob
+ from pathlib import Path
  from PIL import Image
  import os
  import time
@@ -214,6 +216,33 @@ def check_guess(user_guess, correct_mapping_state):

      return feedback

+ EXAMPLE_DIR = Path(__file__).parent / "examples"
+ EXAMPLES = [
+     {
+         "prompt": "A photorealistic portrait of an astronaut on Mars",
+         "files": ["astronauts_seed_6456306350371904162.png", "astronauts_bnb_8bit.png"],
+         "quantized_idx": 1,  # which of the two files is the quantized result
+     },
+     {
+         "prompt": "Water-color painting of a cat wearing sunglasses",
+         "files": ["watercolor_cat_bnb_8bit.png", "watercolor_cat_seed_14269059182221286790.png"],
+         "quantized_idx": 0,
+     },
+     # {
+     #     "prompt": "Neo-tokyo cyberpunk cityscape at night, rain-soaked streets, 8-K",
+     #     "files": ["cyber_city_q.jpg", "cyber_city.jpg"],
+     #     "quantized_idx": 0,
+     # },
+ ]
+
+ def load_example(idx):
+     """Return [(PIL.Image, caption), ...], mapping dict, and feedback string"""
+     ex = EXAMPLES[idx]
+     imgs = [Image.open(EXAMPLE_DIR / f) for f in ex["files"]]
+     gallery_items = [(img, f"Image {i+1}") for i, img in enumerate(imgs)]
+     mapping = {i: ("Quantized" if i == ex["quantized_idx"] else "Original")
+                for i in range(2)}
+     return gallery_items, mapping, f"{ex['prompt']}"

  with gr.Blocks(title="FLUX Quantization Challenge", theme=gr.themes.Soft()) as demo:
      gr.Markdown("# FLUX Model Quantization Challenge")
@@ -223,6 +252,13 @@ with gr.Blocks(title="FLUX Quantization Challenge", theme=gr.themes.Soft()) as d
          "The images will be shuffled, can you spot which one was quantized?"
      )

+     gr.Markdown("### Examples")
+     ex_selector = gr.Radio(
+         choices=[f"Example {i+1}" for i in range(len(EXAMPLES))],
+         label="Choose an example prompt",
+         interactive=True,
+     )
+     gr.Markdown("### …or create your own comparison")
      with gr.Row():
          prompt_input = gr.Textbox(label="Enter Prompt", scale=3)
          quantization_choice_radio = gr.Radio(
@@ -239,7 +275,7 @@ with gr.Blocks(title="FLUX Quantization Challenge", theme=gr.themes.Soft()) as d
          height=512,
          object_fit="contain",
          allow_preview=True,
-         show_label=True,  # Shows "Image 1", "Image 2" captions we provide
+         show_label=True,
      )

      gr.Markdown("### Which image used the selected quantization method?")
@@ -253,21 +289,44 @@ with gr.Blocks(title="FLUX Quantization Challenge", theme=gr.themes.Soft()) as d
      # e.g., {0: 'Original', 1: 'Quantized (8-bit)'} or {0: 'Quantized (4-bit)', 1: 'Original'}
      correct_mapping_state = gr.State({})

+     def _load_example(sel):
+         idx = int(sel.split()[-1]) - 1
+         return load_example(idx)
+
+     ex_selector.change(
+         fn=_load_example,
+         inputs=ex_selector,
+         outputs=[output_gallery, correct_mapping_state, prompt_input],
+     ).then(
+         lambda: (gr.update(interactive=True), gr.update(interactive=True)),
+         outputs=[image1_btn, image2_btn],
+     )
+
      generate_button.click(
          fn=generate_images,
          inputs=[prompt_input, quantization_choice_radio],
-         outputs=[output_gallery, correct_mapping_state],
-     ).then(lambda: "", outputs=[feedback_box])  # clear feedback on new run
+         outputs=[output_gallery, correct_mapping_state]  # , feedback_box],
+     ).then(
+         lambda: (gr.update(interactive=True),
+                  gr.update(interactive=True),
+                  ""),  # clear feedback
+         outputs=[image1_btn, image2_btn, feedback_box],
+     )

-     # helper wrappers so we can supply the fixed choice string
-     def choose_img1(mapping):
-         return check_guess("Image 1", mapping)
-     def choose_img2(mapping):
-         return check_guess("Image 2", mapping)
+     def choose(choice_string, mapping):
+         feedback = check_guess(choice_string, mapping)
+         return feedback, gr.update(interactive=False), gr.update(interactive=False)

-     image1_btn.click(choose_img1, inputs=[correct_mapping_state], outputs=[feedback_box])
-     image2_btn.click(choose_img2, inputs=[correct_mapping_state], outputs=[feedback_box])
+     image1_btn.click(
+         fn=lambda mapping: choose("Image 1", mapping),
+         inputs=[correct_mapping_state],
+         outputs=[feedback_box, image1_btn, image2_btn],
+     )
+     image2_btn.click(
+         fn=lambda mapping: choose("Image 2", mapping),
+         inputs=[correct_mapping_state],
+         outputs=[feedback_box, image1_btn, image2_btn],
+     )

  if __name__ == "__main__":
-     demo.launch(share=True)
-     demo.launch()
+     demo.launch(share=True)
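For context, the committed example pairs look like saved outputs of the app's two generation paths: one baseline run with a fixed seed, and one run with an 8-bit bitsandbytes-quantized transformer. Below is a rough offline sketch of how such a pair could be produced with diffusers; it is not part of this commit, and the checkpoint id, step count, and guidance scale are assumptions rather than values read from app.py:

```python
# Hypothetical offline script for producing one example pair; NOT part of this commit.
# Assumptions: the FLUX.1-dev checkpoint, diffusers with bitsandbytes support installed,
# and enough memory to run the two pipelines one after the other with CPU offload.
import gc
from pathlib import Path

import torch
from diffusers import BitsAndBytesConfig, FluxPipeline, FluxTransformer2DModel

MODEL_ID = "black-forest-labs/FLUX.1-dev"  # assumed; the app may use a different checkpoint
PROMPT = "A photorealistic portrait of an astronaut on Mars"
SEED = 6456306350371904162                 # seed taken from the committed file name

Path("examples").mkdir(exist_ok=True)

def render(tag, transformer=None):
    """Run the pipeline once and save examples/astronauts_<tag>.png."""
    kwargs = {"torch_dtype": torch.bfloat16}
    if transformer is not None:
        kwargs["transformer"] = transformer   # swap in the quantized transformer
    pipe = FluxPipeline.from_pretrained(MODEL_ID, **kwargs)
    pipe.enable_model_cpu_offload()
    image = pipe(
        PROMPT,
        generator=torch.Generator("cpu").manual_seed(SEED),
        num_inference_steps=28,               # assumed, not read from app.py
        guidance_scale=3.5,                   # assumed, not read from app.py
    ).images[0]
    image.save(f"examples/astronauts_{tag}.png")
    del pipe
    gc.collect()
    if torch.cuda.is_available():
        torch.cuda.empty_cache()

# Baseline image with the full-precision transformer.
render(f"seed_{SEED}")

# Same prompt and seed, but with the transformer loaded in 8-bit via bitsandbytes.
quantized = FluxTransformer2DModel.from_pretrained(
    MODEL_ID,
    subfolder="transformer",
    quantization_config=BitsAndBytesConfig(load_in_8bit=True),
    torch_dtype=torch.bfloat16,
)
render("bnb_8bit", transformer=quantized)
```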