kernel
danieldk committed
Commit c558d47 · Parent(s): e84674f

Llama4TextMoe: mark as not having backward

Files changed (1)
  1. torch-ext/moe/layers.py +2 -0
torch-ext/moe/layers.py CHANGED

@@ -25,6 +25,8 @@ def _fix_llama4_experts(hidden_states: torch.Tensor, experts: nn.Module):
 
 
 class Llama4TextMoe(nn.Module):
+    has_backward = False
+
     experts: nn.Module
     router: nn.Linear
     shared_expert: nn.Module
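
For context, the commit message ("mark as not having backward") indicates that a class-level `has_backward` flag tells the kernel loader this layer only implements a forward pass, so it should not be selected for training. Below is a minimal sketch of those opt-out semantics; the `supports_backward` helper is hypothetical, added only for illustration, and the default-to-True fallback for layers without the flag is an assumption, not confirmed loader behavior.

```python
import torch.nn as nn


class Llama4TextMoe(nn.Module):
    # As in this commit: declare that the layer provides no backward
    # implementation, so it must not be picked for training use.
    has_backward = False

    experts: nn.Module
    router: nn.Linear
    shared_expert: nn.Module


def supports_backward(layer_cls: type) -> bool:
    # Hypothetical helper (not the actual loader code): treat a missing
    # flag as backward-capable, making has_backward = False an opt-out.
    return getattr(layer_cls, "has_backward", True)


assert not supports_backward(Llama4TextMoe)
```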