Commit 33f9417
Parent(s): b71bed3
I don't think this will work, trying to use blip-itm-large-coco
app.py CHANGED
@@ -7,14 +7,22 @@ from PIL import Image
 device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
 
 
-# Load the BLIP-2 model and processor
-processor = AutoProcessor.from_pretrained("Salesforce/blip2-opt-6.7b-coco")
+# Load the BLIP-2 model and processor (needs A10G)
+processor = AutoProcessor.from_pretrained("Salesforce/blip-itm-large-coco")
 # Load model in int8 using bitsandbytes, and pass device_map='auto'
 model = Blip2ForConditionalGeneration.from_pretrained(
-    "Salesforce/blip2-opt-6.7b-coco", load_in_8bit=True, device_map='auto'
+    "Salesforce/blip-itm-large-coco", load_in_8bit=True, device_map='auto'
 )
 
-
+
+# Load the BLIP-2 model and processor (needs A10G)
+#processor = AutoProcessor.from_pretrained("Salesforce/blip2-opt-6.7b-coco")
+# Load model in int8 using bitsandbytes, and pass device_map='auto'
+#model = Blip2ForConditionalGeneration.from_pretrained(
+#    "Salesforce/blip2-opt-6.7b-coco", load_in_8bit=True, device_map='auto'
+#)
+
+# Uncomment lines 20, 22, 23, & 24 to begin using blip2-oopt-2.7b model (can run on T4 Medium)
 
 # Load the BLIP-2 model and processor
 #processor = AutoProcessor.from_pretrained("Salesforce/blip2-opt-2.7b")