Change model
app.py CHANGED
@@ -27,10 +27,9 @@ token_probabilities = np.array([token_probs_dict[str(i)] for i in range(len(toke
 
 def load_model():
     ckpt_path = hf_hub_download(
-        repo_id="ruurd/
+        repo_id="ruurd/tini_bi_m",
         filename="diffusion-model.pth",
-        token=os.getenv("HF_TOKEN")
-        revision="8bb2d44"
+        token=os.getenv("HF_TOKEN")
     )
 
     device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
@@ -127,9 +126,6 @@ def confidence_guided_noising(input_ids, answer_start, confidences, noise_clippi
 
     return noised
 
-
-
-
 @spaces.GPU
 def generate_diffusion_text(input_ids):
     with torch.no_grad():
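For context, a minimal sketch of how load_model() might read after this commit. Only the hf_hub_download() call and the device selection are visible in the diff; the torch.load() step, the eval() call, and the return value are assumptions added here for illustration.

# Sketch of load_model() after this commit (assumptions noted in comments).
import os

import torch
from huggingface_hub import hf_hub_download


def load_model():
    # Download the checkpoint, authenticating with the Space's HF_TOKEN
    # secret (needed if the repo is private or gated). The revision pin
    # present before this commit has been dropped.
    ckpt_path = hf_hub_download(
        repo_id="ruurd/tini_bi_m",
        filename="diffusion-model.pth",
        token=os.getenv("HF_TOKEN"),
    )

    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

    # Assumption: the .pth file holds a pickled model object; if it is a
    # state_dict instead, it would be loaded into a model instance here.
    model = torch.load(ckpt_path, map_location=device)
    model.eval()
    return model

The second hunk only removes surplus blank lines above the @spaces.GPU-decorated generate_diffusion_text(), so the ZeroGPU entry point itself is unchanged.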