
Ruurd committed (verified) · Commit f2ca6a6 · Parent(s): b43e862

Create safe fallback for models not yet initialized with masking_type

Files changed (1): llama_diffusion_model.py (+2 −0)
llama_diffusion_model.py CHANGED
@@ -120,6 +120,8 @@ class CustomTransformerModel(PreTrainedModel):
 
         # Build attention mask
         device = input_ids.device
+
+        masking_type = getattr(self.config, "masking_type", "bidirectional_masked")
         if self.config.masking_type == 'bidirectional':
            base_mask = torch.ones(seq_len, seq_len, dtype=torch.bool, device=device)
         elif self.config.masking_type == 'bidirectional_masked':
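For context, a minimal sketch of the fallback pattern this commit introduces: getattr with a default lets checkpoints whose config was saved before masking_type existed keep loading instead of raising AttributeError. The build_base_mask helper and the branch bodies below are illustrative assumptions, not the repository's code; the sketch also routes the comparisons through the local masking_type so the default actually takes effect, whereas the hunk's unchanged context lines still read self.config.masking_type directly.

import torch

def build_base_mask(config, seq_len, device):
    # Safe fallback: configs saved before `masking_type` was added
    # yield the default instead of raising AttributeError.
    masking_type = getattr(config, "masking_type", "bidirectional_masked")

    if masking_type == "bidirectional":
        # Every position may attend to every other position.
        return torch.ones(seq_len, seq_len, dtype=torch.bool, device=device)
    elif masking_type == "bidirectional_masked":
        # Illustrative assumption: bidirectional attention with the
        # diagonal masked out, so a token cannot attend to itself.
        mask = torch.ones(seq_len, seq_len, dtype=torch.bool, device=device)
        mask.fill_diagonal_(False)
        return mask
    raise ValueError(f"Unsupported masking_type: {masking_type!r}")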