Spaces: Runtime error
Update app.py
app.py CHANGED
@@ -31,6 +31,7 @@ import jax.numpy as jnp
 import flax.linen as nn
 
 from transformers import PaliGemmaForConditionalGeneration, PaliGemmaProcessor
+from peft import PeftConfig, PeftModel
 from huggingface_hub import login
 import spaces
 
@@ -41,8 +42,14 @@ login(token=hf_token, add_to_git_credential=True)
 
 device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 
-model_id = "
+model_id = "google/paligemma-3b-pt-224"
+adapter_model_id = "dwb2023/paligemma-cnmc-ft"
 model = PaliGemmaForConditionalGeneration.from_pretrained(model_id).eval().to(device)
+model = PeftModel.from_pretrained(model, adapter_model_name).to(device)
+
+model = model.merge_and_unload()
+model.save_pretrained("merged_adapters")
+
 processor = PaliGemmaProcessor.from_pretrained(model_id)
 
 @spaces.GPU
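For reference, the model-loading block added by this commit collapses into the standalone sketch below. Note that the diff defines adapter_model_id but passes adapter_model_name to PeftModel.from_pretrained, which would raise a NameError at startup and may be related to the Space's runtime error; the sketch assumes the intended variable is adapter_model_id and is a best-effort reconstruction, not the exact contents of app.py.

import torch
from transformers import PaliGemmaForConditionalGeneration, PaliGemmaProcessor
from peft import PeftModel

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

model_id = "google/paligemma-3b-pt-224"
adapter_model_id = "dwb2023/paligemma-cnmc-ft"

# Load the base PaliGemma checkpoint, then attach the fine-tuned adapter.
model = PaliGemmaForConditionalGeneration.from_pretrained(model_id).eval().to(device)
# The commit uses adapter_model_name here, which is undefined; adapter_model_id is assumed.
model = PeftModel.from_pretrained(model, adapter_model_id).to(device)

# Fold the adapter weights into the base model and persist the merged checkpoint.
model = model.merge_and_unload()
model.save_pretrained("merged_adapters")

processor = PaliGemmaProcessor.from_pretrained(model_id)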