Spaces: Running on Zero
Add spaces decorator
app.py CHANGED
@@ -9,6 +9,7 @@ import os
 import importlib
 from huggingface_hub import hf_hub_download
 from llama_diffusion_model import CustomTransformerModel, CustomTransformerConfig, BidirectionalLlamaAttention, disable_dropout
+import spaces
 
 hf_token = os.getenv("HF_TOKEN")
 
@@ -25,7 +26,7 @@ with open("token_probabilities.json") as f:
     token_probs_dict = json.load(f)
 token_probabilities = np.array([token_probs_dict[str(i)] for i in range(len(token_probs_dict))], dtype=np.float32)
 
-
+@spaces.GPU
 def load_model():
     ckpt_path = hf_hub_download(
         repo_id="ruurd/tini_model",
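For context, @spaces.GPU is the ZeroGPU hook: on a Space running on Zero hardware, a GPU is attached only while a decorated function executes, so GPU-bound work (here, loading and running the model) has to happen inside such a function, and the spaces package has to be imported in app.py for the decorator to be available. Below is a minimal, self-contained sketch of that pattern; the toy model, function name, and Gradio wiring are illustrative placeholders, not this repository's actual code.

# Minimal ZeroGPU sketch -- the model, function, and UI are placeholders,
# not the code from this repository.
import gradio as gr
import spaces
import torch

# Build the model on CPU at import time; no GPU is attached yet.
model = torch.nn.Linear(4, 1)

@spaces.GPU  # ZeroGPU allocates a GPU only for the duration of this call
def predict(x: float) -> float:
    device = "cuda" if torch.cuda.is_available() else "cpu"
    m = model.to(device)                        # move weights onto the attached GPU
    inp = torch.full((1, 4), x, device=device)  # toy input built on the same device
    with torch.no_grad():
        return m(inp).item()

demo = gr.Interface(fn=predict, inputs="number", outputs="number")

if __name__ == "__main__":
    demo.launch()

For calls that need more than the default time slice, the decorator also accepts a duration in seconds, e.g. @spaces.GPU(duration=120).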