Llama4TextMoe: mark as not having backward
Changed file: torch-ext/moe/layers.py (+2, -0)

@@ -25,6 +25,8 @@ def _fix_llama4_experts(hidden_states: torch.Tensor, experts: nn.Module):
 
 
 class Llama4TextMoe(nn.Module):
+    has_backward = False
+
     experts: nn.Module
     router: nn.Linear
     shared_expert: nn.Module
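For context, a minimal sketch of how a class-level `has_backward` flag like this could be consumed by calling code, e.g. to keep backward-less layers out of training paths. The `supports_training` helper and `_Example` class are assumptions for illustration, not part of this repo:

import torch.nn as nn

def supports_training(layer: nn.Module) -> bool:
    # Hypothetical helper: treat layers as trainable unless they
    # explicitly opt out via a has_backward = False class attribute.
    return getattr(layer, "has_backward", True)

class _Example(nn.Module):
    has_backward = False  # mirrors the flag added in this commit

assert supports_training(nn.Linear(4, 4))  # no flag -> assumed trainable
assert not supports_training(_Example())   # opted out of backward

Defaulting to True via getattr keeps existing layers unaffected; only layers that explicitly set the flag, as Llama4TextMoe does here, are excluded.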