fix: llama-3
Browse files

Changed file: src/model/model_llava.py (+1 −1)
@@ -114,7 +114,7 @@ def inference_by_prompt_and_images_fire(prompt, images):
     with torch.inference_mode():
         cont = model_llava_fire.generate(
             input_ids,
-            images=image_tensor,
+            images=[image_tensor.squeeze(dim=0)],
             image_sizes=image_sizes,
             do_sample=False,
             temperature=0,