cwhuh committed on
Commit
52ee639
·
1 Parent(s): 63fb7f7

test : minor changes

Browse files
__pycache__/llm_wrapper.cpython-310.pyc CHANGED
Binary files a/__pycache__/llm_wrapper.cpython-310.pyc and b/__pycache__/llm_wrapper.cpython-310.pyc differ
 
app.py CHANGED
@@ -7,7 +7,7 @@ from diffusers import DiffusionPipeline, FlowMatchEulerDiscreteScheduler, Autoe
7
  from transformers import CLIPTextModel, CLIPTokenizer,T5EncoderModel, T5TokenizerFast
8
  from live_preview_helpers import calculate_shift, retrieve_timesteps, flux_pipe_call_that_returns_an_iterable_of_images
9
 
10
- from llm_wrapper import run_gemini
11
  from huggingface_hub import hf_hub_download
12
  from safetensors.torch import load_file
13
  import subprocess
@@ -41,10 +41,10 @@ def infer(prompt, seed=42, randomize_seed=False, width=1024, height=1024, guidan
41
  seed = random.randint(0, MAX_SEED)
42
  generator = torch.Generator().manual_seed(seed)
43
 
44
- refined_prompt = run_gemini(
45
- target_prompt=prompt,
46
- prompt_in_path="prompt.json",
47
- )
48
 
49
  for img in pipe.flux_pipe_call_that_returns_an_iterable_of_images(
50
  prompt=refined_prompt,
 
7
  from transformers import CLIPTextModel, CLIPTokenizer,T5EncoderModel, T5TokenizerFast
8
  from live_preview_helpers import calculate_shift, retrieve_timesteps, flux_pipe_call_that_returns_an_iterable_of_images
9
 
10
+ # from llm_wrapper import run_gemini
11
  from huggingface_hub import hf_hub_download
12
  from safetensors.torch import load_file
13
  import subprocess
 
41
  seed = random.randint(0, MAX_SEED)
42
  generator = torch.Generator().manual_seed(seed)
43
 
44
+ # refined_prompt = run_gemini(
45
+ # target_prompt=prompt,
46
+ # prompt_in_path="prompt.json",
47
+ # )
48
 
49
  for img in pipe.flux_pipe_call_that_returns_an_iterable_of_images(
50
  prompt=refined_prompt,