Ruurd committed on
Commit 12ed3ea · verified · 1 Parent(s): 2798cf6

Remove BidirectionalLlamaAttention import

Files changed (1)
  1. app.py +1 -1
app.py CHANGED
@@ -7,7 +7,7 @@ from transformers import AutoTokenizer
 import os
 import importlib
 from huggingface_hub import hf_hub_download
-from llama_diffusion_model import CustomTransformerModel, CustomTransformerConfig, BidirectionalLlamaAttention, disable_dropout
+from llama_diffusion_model import CustomTransformerModel, CustomTransformerConfig, disable_dropout
 import spaces
 
 hf_token = os.getenv("HF_TOKEN")
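
For context, a minimal sketch of how the imports that remain after this commit might fit together in app.py. The repo id, checkpoint filename, and constructor arguments below are assumptions for illustration, not taken from the actual file:

import os
from huggingface_hub import hf_hub_download
from llama_diffusion_model import CustomTransformerModel, CustomTransformerConfig, disable_dropout

hf_token = os.getenv("HF_TOKEN")

# Hypothetical checkpoint location: the real repo id and filename live in app.py.
weights_path = hf_hub_download(
    repo_id="Ruurd/llama-diffusion",   # assumption, illustrative only
    filename="model.safetensors",      # assumption, illustrative only
    token=hf_token,
)

# BidirectionalLlamaAttention is no longer imported here; any attention
# patching is presumably handled inside CustomTransformerModel itself.
config = CustomTransformerConfig()
model = CustomTransformerModel(config)
disable_dropout(model)  # turn off dropout for inference, per the remaining import

Since only the import list changed (+1 -1), removing the now-unused name keeps the module's public surface tidy without affecting runtime behavior.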