cwhuh committed
Commit 608d040 · 1 Parent(s): 2bde0d4

chore: fix LoRA version to checkpoint-2500
.gradio/cached_examples/27/indices.csv ADDED
@@ -0,0 +1,4 @@
+ 1
+ 3
+ 0
+ 2
.gradio/cached_examples/27/log.csv ADDED
@@ -0,0 +1,5 @@
+ 생성된 이미지,시드 값,timestamp
+ "{""path"": "".gradio/cached_examples/27/\uc0dd\uc131\ub41c \uc774\ubbf8\uc9c0/95eb27ceee7753837396/image.webp"", ""url"": ""/gradio_api/file=/tmp/gradio/76d3d1e129de235693825e4b383dbaf7ac525e81a5bcb748f8d2c59fb3c60c0e/image.webp"", ""size"": null, ""orig_name"": ""image.webp"", ""mime_type"": null, ""is_stream"": false, ""meta"": {""_type"": ""gradio.FileData""}}",42,2025-03-11 04:17:53.705536
+ "{""path"": "".gradio/cached_examples/27/\uc0dd\uc131\ub41c \uc774\ubbf8\uc9c0/43a835dc59850af93063/image.webp"", ""url"": ""/gradio_api/file=/tmp/gradio/ab92ba73b48096e370582b5944bb4c2a4f7c9b44ff39006b330cddff88d4e8f4/image.webp"", ""size"": null, ""orig_name"": ""image.webp"", ""mime_type"": null, ""is_stream"": false, ""meta"": {""_type"": ""gradio.FileData""}}",42,2025-03-11 04:32:13.272739
+ "{""path"": "".gradio/cached_examples/27/\uc0dd\uc131\ub41c \uc774\ubbf8\uc9c0/f238e9186b6ac8f19f2e/image.webp"", ""url"": ""/gradio_api/file=/tmp/gradio/99e30bbb66cddca6ea59a95c5cfd5d0db4ead0f1ff99232dcf3a7b81c91f869e/image.webp"", ""size"": null, ""orig_name"": ""image.webp"", ""mime_type"": null, ""is_stream"": false, ""meta"": {""_type"": ""gradio.FileData""}}",42,2025-03-11 04:50:22.523038
+ "{""path"": "".gradio/cached_examples/27/\uc0dd\uc131\ub41c \uc774\ubbf8\uc9c0/8d6e85b2196f4444ad89/image.webp"", ""url"": ""/gradio_api/file=/tmp/gradio/ab9408cf5b657fe2280ed7ba4d42b0921f22192f66e9cd6628283d6ef1766097/image.webp"", ""size"": null, ""orig_name"": ""image.webp"", ""mime_type"": null, ""is_stream"": false, ""meta"": {""_type"": ""gradio.FileData""}}",42,2025-03-11 06:28:06.332924
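The rows above are Gradio's cached-example records: the first column serializes a `gradio.FileData` dict pointing at the cached output image, followed by the seed value and a timestamp (the Korean headers mean "generated image" and "seed value"). A minimal sketch of reading them back, assuming only the layout shown above:

```python
import csv
import json

# Read back Gradio's cached-example log. Column 0 ("생성된 이미지") holds a
# JSON-encoded gradio.FileData dict; the others are the seed and timestamp.
with open(".gradio/cached_examples/27/log.csv", newline="", encoding="utf-8") as f:
    for row in csv.DictReader(f):
        file_data = json.loads(row["생성된 이미지"])
        print(file_data["path"], row["시드 값"], row["timestamp"])
```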
.gradio/cached_examples/27/생성된 이미지/43a835dc59850af93063/image.webp ADDED
.gradio/cached_examples/27/생성된 이미지/8d6e85b2196f4444ad89/image.webp ADDED
.gradio/cached_examples/27/생성된 이미지/95eb27ceee7753837396/image.webp ADDED
.gradio/cached_examples/27/생성된 이미지/f238e9186b6ac8f19f2e/image.webp ADDED
__pycache__/live_preview_helpers.cpython-310.pyc CHANGED
Binary files a/__pycache__/live_preview_helpers.cpython-310.pyc and b/__pycache__/live_preview_helpers.cpython-310.pyc differ
 
__pycache__/llm_wrapper.cpython-310.pyc CHANGED
Binary files a/__pycache__/llm_wrapper.cpython-310.pyc and b/__pycache__/llm_wrapper.cpython-310.pyc differ
 
app.py CHANGED
@@ -24,7 +24,7 @@ good_vae = AutoencoderKL.from_pretrained("black-forest-labs/FLUX.1-dev", subfold
  pipe = DiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-dev", torch_dtype=dtype, vae=taef1).to(device)

  # PONIX mode load
- pipe.load_lora_weights('cwhuh/ponix-generator-v0.1.0', weight_name='pytorch_lora_weights.safetensors')
+ pipe.load_lora_weights('cwhuh/ponix-generator-v0.1.0', weight_name='pytorch_lora_weights.safetensors', subfolder="checkpoint-2500")
  embedding_path = hf_hub_download(repo_id='cwhuh/ponix-generator-v0.1.0', filename='./ponix-generator-v0.1.0_emb.safetensors', repo_type="model")
  state_dict = load_file(embedding_path)
  pipe.load_textual_inversion(state_dict["clip_l"], token=["<s0>", "<s1>", "<s2>"], text_encoder=pipe.text_encoder, tokenizer=pipe.tokenizer)
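The one functional change is in this hunk: `load_lora_weights` now receives `subfolder="checkpoint-2500"`, so diffusers fetches `checkpoint-2500/pytorch_lora_weights.safetensors` from the LoRA repo instead of the repo-root weights. A minimal sketch of resolving and pinning the checkpoint outside the app (repo IDs and filenames taken from the diff; the `checkpoint-*/` repo layout and the `bfloat16` dtype are assumptions):

```python
import torch
from diffusers import DiffusionPipeline
from huggingface_hub import list_repo_files

# List the LoRA repo to see which training checkpoints it ships
# (assumes checkpoint-<step>/ subfolders next to the root weights).
files = list_repo_files("cwhuh/ponix-generator-v0.1.0")
checkpoints = sorted({f.split("/")[0] for f in files if f.startswith("checkpoint-")})
print(checkpoints)  # expected to include 'checkpoint-2500'

pipe = DiffusionPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16  # dtype assumed
).to("cuda" if torch.cuda.is_available() else "cpu")

# subfolder= pins the LoRA to one training checkpoint instead of the repo root.
pipe.load_lora_weights(
    "cwhuh/ponix-generator-v0.1.0",
    weight_name="pytorch_lora_weights.safetensors",
    subfolder="checkpoint-2500",
)
```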