iamrobotbear committed
Commit ef3ce27
1 Parent(s): c897c5e

Update app.py

Files changed (1):
app.py +3 -3
app.py CHANGED
@@ -3,9 +3,9 @@ from transformers import AutoProcessor, Blip2ForConditionalGeneration
 import torch
 from PIL import Image
 
-# Load the BLIP-2 model and processor
+# Load the BLIP-2 model and processor with 8-bit loading
 processor = AutoProcessor.from_pretrained("Salesforce/blip2-opt-2.7b")
-model = Blip2ForConditionalGeneration.from_pretrained("Salesforce/blip2-opt-2.7b")
+model = Blip2ForConditionalGeneration.from_pretrained("Salesforce/blip2-opt-2.7b", load_in_8bit=True)
 
 # Set device to GPU if available
 device = "cuda" if torch.cuda.is_available() else "cpu"
@@ -41,7 +41,7 @@ def blip2_interface(image, prompted_caption_text, vqa_question, chat_context):
 
 # Define Gradio input and output components
 image_input = gr.inputs.Image(type="numpy")
-text_input = gr.inputs.Textbox() # Use gr.inputs.Textbox() instead of gr.inputs.Text()
+text_input = gr.inputs.Text()
 output_text = gr.outputs.Textbox()
 
 # Create Gradio interface
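
For context, a minimal standalone sketch of the 8-bit loading path this commit introduces, assuming bitsandbytes and accelerate are installed (both are needed for load_in_8bit=True) and a CUDA GPU is available; device_map="auto" and the image path "example.jpg" are placeholders added for the sketch, not part of the app:

# Minimal captioning sketch, assuming bitsandbytes + accelerate are installed
# and a CUDA GPU is present; "example.jpg" is a placeholder image path.
import torch
from PIL import Image
from transformers import AutoProcessor, Blip2ForConditionalGeneration

processor = AutoProcessor.from_pretrained("Salesforce/blip2-opt-2.7b")
model = Blip2ForConditionalGeneration.from_pretrained(
    "Salesforce/blip2-opt-2.7b",
    load_in_8bit=True,   # quantize weights to 8-bit via bitsandbytes
    device_map="auto",   # let accelerate place the layers on the GPU
)

image = Image.open("example.jpg").convert("RGB")
inputs = processor(images=image, return_tensors="pt").to(model.device, torch.float16)
generated_ids = model.generate(**inputs, max_new_tokens=30)
print(processor.batch_decode(generated_ids, skip_special_tokens=True)[0].strip())

Loading in 8-bit roughly halves GPU memory relative to fp16 at a small accuracy cost, which is the usual motivation for passing load_in_8bit=True.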