danieldk (HF Staff) committed
Commit bbdc1b4 · 1 Parent(s): 8fa0045
Files changed (49)
  1. build/torch25-cxx11-cu118-x86_64-linux/activation/{_activation_9e96ad7.abi3.so → _activation_78448fa.abi3.so} +2 -2
  2. build/torch25-cxx11-cu118-x86_64-linux/activation/_ops.py +3 -3
  3. build/torch25-cxx11-cu121-x86_64-linux/activation/{_activation_9e96ad7.abi3.so → _activation_78448fa.abi3.so} +2 -2
  4. build/torch25-cxx11-cu121-x86_64-linux/activation/_ops.py +3 -3
  5. build/torch25-cxx11-cu124-x86_64-linux/activation/{_activation_9e96ad7.abi3.so → _activation_78448fa.abi3.so} +2 -2
  6. build/torch25-cxx11-cu124-x86_64-linux/activation/_ops.py +3 -3
  7. build/torch25-cxx98-cu118-x86_64-linux/activation/{_activation_9e96ad7.abi3.so → _activation_78448fa.abi3.so} +2 -2
  8. build/torch25-cxx98-cu118-x86_64-linux/activation/_ops.py +3 -3
  9. build/torch25-cxx98-cu121-x86_64-linux/activation/_activation_78448fa.abi3.so +3 -0
  10. build/torch25-cxx98-cu121-x86_64-linux/activation/_activation_9e96ad7.abi3.so +0 -3
  11. build/torch25-cxx98-cu121-x86_64-linux/activation/_ops.py +3 -3
  12. build/torch25-cxx98-cu124-x86_64-linux/activation/_activation_78448fa.abi3.so +3 -0
  13. build/torch25-cxx98-cu124-x86_64-linux/activation/_activation_9e96ad7.abi3.so +0 -3
  14. build/torch25-cxx98-cu124-x86_64-linux/activation/_ops.py +3 -3
  15. build/torch26-cxx11-cu118-x86_64-linux/activation/_activation_78448fa.abi3.so +3 -0
  16. build/torch26-cxx11-cu118-x86_64-linux/activation/_activation_9e96ad7.abi3.so +0 -3
  17. build/torch26-cxx11-cu118-x86_64-linux/activation/_ops.py +3 -3
  18. build/torch26-cxx11-cu124-x86_64-linux/activation/_activation_78448fa.abi3.so +3 -0
  19. build/torch26-cxx11-cu124-x86_64-linux/activation/_activation_9e96ad7.abi3.so +0 -3
  20. build/torch26-cxx11-cu124-x86_64-linux/activation/_ops.py +3 -3
  21. build/torch26-cxx11-cu126-x86_64-linux/activation/_activation_78448fa.abi3.so +3 -0
  22. build/torch26-cxx11-cu126-x86_64-linux/activation/_activation_9e96ad7.abi3.so +0 -3
  23. build/torch26-cxx11-cu126-x86_64-linux/activation/_ops.py +3 -3
  24. build/torch26-cxx11-rocm62-x86_64-linux/activation/__init__.py +0 -52
  25. build/torch26-cxx11-rocm62-x86_64-linux/activation/_activation_9e96ad7.abi3.so +0 -3
  26. build/torch26-cxx11-rocm62-x86_64-linux/activation/_ops.py +0 -9
  27. build/torch26-cxx11-rocm62-x86_64-linux/activation/layers.py +0 -65
  28. build/torch26-cxx98-cu118-x86_64-linux/activation/_activation_78448fa.abi3.so +3 -0
  29. build/torch26-cxx98-cu118-x86_64-linux/activation/_activation_9e96ad7.abi3.so +0 -3
  30. build/torch26-cxx98-cu118-x86_64-linux/activation/_ops.py +3 -3
  31. build/torch26-cxx98-cu124-x86_64-linux/activation/_activation_78448fa.abi3.so +3 -0
  32. build/torch26-cxx98-cu124-x86_64-linux/activation/_activation_9e96ad7.abi3.so +0 -3
  33. build/torch26-cxx98-cu124-x86_64-linux/activation/_ops.py +3 -3
  34. build/torch26-cxx98-cu126-x86_64-linux/activation/_activation_78448fa.abi3.so +3 -0
  35. build/torch26-cxx98-cu126-x86_64-linux/activation/_activation_9e96ad7.abi3.so +0 -3
  36. build/torch26-cxx98-cu126-x86_64-linux/activation/_ops.py +3 -3
  37. build/torch27-cxx11-cu118-x86_64-linux/activation/_activation_78448fa.abi3.so +3 -0
  38. build/torch27-cxx11-cu118-x86_64-linux/activation/_activation_9e96ad7.abi3.so +0 -3
  39. build/torch27-cxx11-cu118-x86_64-linux/activation/_ops.py +3 -3
  40. build/torch27-cxx11-cu126-x86_64-linux/activation/_activation_78448fa.abi3.so +3 -0
  41. build/torch27-cxx11-cu126-x86_64-linux/activation/_activation_9e96ad7.abi3.so +0 -3
  42. build/torch27-cxx11-cu126-x86_64-linux/activation/_ops.py +3 -3
  43. build/torch27-cxx11-cu128-x86_64-linux/activation/_activation_78448fa.abi3.so +3 -0
  44. build/torch27-cxx11-cu128-x86_64-linux/activation/_activation_9e96ad7.abi3.so +0 -3
  45. build/torch27-cxx11-cu128-x86_64-linux/activation/_ops.py +3 -3
  46. build/torch27-cxx11-rocm63-x86_64-linux/activation/__init__.py +0 -52
  47. build/torch27-cxx11-rocm63-x86_64-linux/activation/_activation_9e96ad7.abi3.so +0 -3
  48. build/torch27-cxx11-rocm63-x86_64-linux/activation/_ops.py +0 -9
  49. build/torch27-cxx11-rocm63-x86_64-linux/activation/layers.py +0 -65
build/torch25-cxx11-cu118-x86_64-linux/activation/{_activation_9e96ad7.abi3.so → _activation_78448fa.abi3.so} RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:884c4d8092d9d24f909d971a57846a3f534137efd219ce9a6049579691872304
-size 2370128
+oid sha256:323dbf69b89390fd46b207abc1314a4cbe27491e1bb9f026c840bc3bff43b7d3
+size 2447952
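Each `_activation_*.abi3.so` entry in this commit is a Git LFS pointer file rather than the binary itself: the pointer records the spec version, the SHA-256 (`oid`) of the real shared object, and its `size` in bytes, so the diffs in this commit show only that metadata changing. As a minimal sketch (the local path is hypothetical and assumes LFS has already fetched the artifact), a downloaded binary can be checked against its pointer like this:

```python
import hashlib
from pathlib import Path

# Hypothetical local checkout path for the fetched shared object.
so_path = Path(
    "build/torch25-cxx11-cu118-x86_64-linux/activation/_activation_78448fa.abi3.so"
)

data = so_path.read_bytes()

# The pointer's oid is the SHA-256 of the file contents and its size
# field is the byte length; both values come from the new pointer above.
assert hashlib.sha256(data).hexdigest() == (
    "323dbf69b89390fd46b207abc1314a4cbe27491e1bb9f026c840bc3bff43b7d3"
)
assert len(data) == 2447952
```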
build/torch25-cxx11-cu118-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_9e96ad7
-ops = torch.ops._activation_9e96ad7
+from . import _activation_78448fa
+ops = torch.ops._activation_78448fa
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_9e96ad7::{op_name}"
+    return f"_activation_78448fa::{op_name}"
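`_ops.py` is the only Python-level change per build variant: it rebinds the versioned extension module (now `_activation_78448fa`) to a stable `ops` handle under `torch.ops`, and `add_op_namespace_prefix` yields fully qualified op names for that namespace. A minimal usage sketch, assuming the matching build directory is importable as `activation` and a CUDA device is available (the op name and shapes follow this package's `__init__.py`):

```python
import torch
from activation._ops import ops, add_op_namespace_prefix

# Fully qualified op name, e.g. "_activation_78448fa::silu_and_mul",
# useful for torch.library / torch.ops lookups.
print(add_op_namespace_prefix("silu_and_mul"))

# The kernels write into a caller-allocated output tensor; the
# *_and_mul variants halve the last dimension (see layers.py below).
x = torch.randn(4, 128, device="cuda")
out = torch.empty(4, 64, dtype=x.dtype, device=x.device)
ops.silu_and_mul(out, x)
```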
build/torch25-cxx11-cu121-x86_64-linux/activation/{_activation_9e96ad7.abi3.so → _activation_78448fa.abi3.so} RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a4af5bb0a6708d0b035cc48c0d413f70b08bad41a97025507d243c13d60f3fbf
-size 2393232
+oid sha256:6146ac6e77cbd458560bf67c46d93217833f2caf08260cc80a4aa62ba5645ee9
+size 2471056
build/torch25-cxx11-cu121-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_9e96ad7
-ops = torch.ops._activation_9e96ad7
+from . import _activation_78448fa
+ops = torch.ops._activation_78448fa
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_9e96ad7::{op_name}"
+    return f"_activation_78448fa::{op_name}"
build/torch25-cxx11-cu124-x86_64-linux/activation/{_activation_9e96ad7.abi3.so → _activation_78448fa.abi3.so} RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:27e4d8e0411e88317f61a7191ed896d966542be211dafd91ccff610d4617b143
-size 2427912
+oid sha256:28eea3907055742f99bc9d7d4260add848adc2f6464e97029f37cd42a5c6bd0a
+size 2509832
build/torch25-cxx11-cu124-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_9e96ad7
-ops = torch.ops._activation_9e96ad7
+from . import _activation_78448fa
+ops = torch.ops._activation_78448fa
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_9e96ad7::{op_name}"
+    return f"_activation_78448fa::{op_name}"
build/torch25-cxx98-cu118-x86_64-linux/activation/{_activation_9e96ad7.abi3.so → _activation_78448fa.abi3.so} RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c15107f5e85897098fe1463c54b428c74a7bf21e5c8e87df3eb88656e756a04a
-size 2362568
+oid sha256:d5609ad07903b98c83c297bfb64f0d944df5edfe1c611fee23ec6c8fbd952604
+size 2440392
build/torch25-cxx98-cu118-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_9e96ad7
-ops = torch.ops._activation_9e96ad7
+from . import _activation_78448fa
+ops = torch.ops._activation_78448fa
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_9e96ad7::{op_name}"
+    return f"_activation_78448fa::{op_name}"
build/torch25-cxx98-cu121-x86_64-linux/activation/_activation_78448fa.abi3.so ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7e6475ed603ad2cb565bd19ad2554484bd6c00d0d3f02decff60f2285df2546f
+size 2463232
build/torch25-cxx98-cu121-x86_64-linux/activation/_activation_9e96ad7.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:8f22bb647600ef6b3f1b7f90adc12810b59aa3aad483886d29198ffe8b5c96bc
-size 2385408
build/torch25-cxx98-cu121-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_9e96ad7
-ops = torch.ops._activation_9e96ad7
+from . import _activation_78448fa
+ops = torch.ops._activation_78448fa
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_9e96ad7::{op_name}"
+    return f"_activation_78448fa::{op_name}"
build/torch25-cxx98-cu124-x86_64-linux/activation/_activation_78448fa.abi3.so ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a0767f6dba00c543d3cb77e2044bccd32ef569abc55b921231112c8a1ddfb187
+size 2502088
build/torch25-cxx98-cu124-x86_64-linux/activation/_activation_9e96ad7.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:da1cbd43dbbe6b615414630ca0e02b2ec3bc0678327297d53d1dfc8291b7e068
-size 2420168
build/torch25-cxx98-cu124-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_9e96ad7
-ops = torch.ops._activation_9e96ad7
+from . import _activation_78448fa
+ops = torch.ops._activation_78448fa
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_9e96ad7::{op_name}"
+    return f"_activation_78448fa::{op_name}"
build/torch26-cxx11-cu118-x86_64-linux/activation/_activation_78448fa.abi3.so ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e0c04d860454cc565113a3c93ff755fe9cbba0578c4604b89ad89e47c2503932
+size 2448056
build/torch26-cxx11-cu118-x86_64-linux/activation/_activation_9e96ad7.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:48a70ec24de276cd135fde9c19b5f350ee75fc320ab17b86094ec07623dd11c5
-size 2370232
build/torch26-cxx11-cu118-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_9e96ad7
-ops = torch.ops._activation_9e96ad7
+from . import _activation_78448fa
+ops = torch.ops._activation_78448fa
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_9e96ad7::{op_name}"
+    return f"_activation_78448fa::{op_name}"
build/torch26-cxx11-cu124-x86_64-linux/activation/_activation_78448fa.abi3.so ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:48d7b0d190af1dd0366dbaeb0690b9c7cd1dfdc9aeda9b0b23bce56c70f5cbae
+size 2509928
build/torch26-cxx11-cu124-x86_64-linux/activation/_activation_9e96ad7.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:b3297aa880f085a936f0d243642ad1e15004e3ca50fa0b99f7cd07e98cd5d2b8
-size 2428008
build/torch26-cxx11-cu124-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_9e96ad7
-ops = torch.ops._activation_9e96ad7
+from . import _activation_78448fa
+ops = torch.ops._activation_78448fa
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_9e96ad7::{op_name}"
+    return f"_activation_78448fa::{op_name}"
build/torch26-cxx11-cu126-x86_64-linux/activation/_activation_78448fa.abi3.so ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:11a11d0f4119edc5c637bab04ebd5669750a0e4f4000f58ab1bf5be2d8d9ab0b
+size 2518568
build/torch26-cxx11-cu126-x86_64-linux/activation/_activation_9e96ad7.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:24ad961244d4c61af7911d2487e40952df2f1d871ed211d918b7a471c3c18237
-size 2436648
build/torch26-cxx11-cu126-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_9e96ad7
-ops = torch.ops._activation_9e96ad7
+from . import _activation_78448fa
+ops = torch.ops._activation_78448fa
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_9e96ad7::{op_name}"
+    return f"_activation_78448fa::{op_name}"
build/torch26-cxx11-rocm62-x86_64-linux/activation/__init__.py DELETED
@@ -1,52 +0,0 @@
-import torch
-
-from ._ops import ops
-
-from . import layers
-
-
-def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.silu_and_mul(out, x)
-    return out
-
-
-def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.gelu_and_mul(out, x)
-    return out
-
-
-def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.gelu_tanh_and_mul(out, x)
-    return out
-
-
-def fatrelu_and_mul(out: torch.Tensor, x: torch.Tensor, threshold: float = 0.0) -> None:
-    ops.fatrelu_and_mul(out, x, threshold)
-    return out
-
-
-def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.gelu_fast(out, x)
-    return out
-
-
-def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.gelu_new(out, x)
-    return out
-
-
-def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.gelu_quick(out, x)
-    return out
-
-
-__all__ = [
-    "silu_and_mul",
-    "gelu_and_mul",
-    "gelu_tanh_and_mul",
-    "fatrelu_and_mul",
-    "gelu_fast",
-    "gelu_new",
-    "gelu_quick",
-    "layers",
-]
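For context, this is the functional API that disappears with the ROCm 6.2 build: each helper dispatches to the corresponding kernel through an out parameter and then returns `out` (the `-> None` annotations in the original file notwithstanding). A hedged sketch of how these helpers are called on the remaining CUDA builds (shapes illustrative; the gated ops expect an input whose last dimension is twice the output's, per `layers.py` below):

```python
import torch
import activation  # the kernel package built in this repo

x = torch.randn(8, 2048, device="cuda")
out = torch.empty(8, 1024, dtype=x.dtype, device=x.device)

# Gated activation: consumes [..., 2 * d], writes [..., d].
activation.silu_and_mul(out, x)

# fatrelu_and_mul additionally takes a threshold argument.
activation.fatrelu_and_mul(out, x, threshold=0.05)

# Elementwise variants preserve the shape.
y = torch.empty_like(x)
activation.gelu_quick(y, x)
```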
build/torch26-cxx11-rocm62-x86_64-linux/activation/_activation_9e96ad7.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:84d554831ac261c589d4fce44b43be87a4e433f288a486bfab011b8e3300acd8
-size 2465760
build/torch26-cxx11-rocm62-x86_64-linux/activation/_ops.py DELETED
@@ -1,9 +0,0 @@
-import torch
-from . import _activation_9e96ad7
-ops = torch.ops._activation_9e96ad7
-
-def add_op_namespace_prefix(op_name: str):
-    """
-    Prefix op by namespace.
-    """
-    return f"_activation_9e96ad7::{op_name}"
build/torch26-cxx11-rocm62-x86_64-linux/activation/layers.py DELETED
@@ -1,65 +0,0 @@
1
- import torch
2
- import torch.nn as nn
3
-
4
- from ._ops import ops
5
-
6
-
7
- class SiluAndMul(nn.Module):
8
- def forward(self, x: torch.Tensor):
9
- d = x.shape[-1] // 2
10
- output_shape = x.shape[:-1] + (d,)
11
- out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
12
- ops.silu_and_mul(out, x)
13
- return out
14
-
15
-
16
- class GeluAndMul(nn.Module):
17
- def forward(self, x: torch.Tensor):
18
- d = x.shape[-1] // 2
19
- output_shape = x.shape[:-1] + (d,)
20
- out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
21
- ops.gelu_and_mul(out, x)
22
- return out
23
-
24
-
25
- class GeluTanhAndMul(nn.Module):
26
- def forward(self, x: torch.Tensor):
27
- d = x.shape[-1] // 2
28
- output_shape = x.shape[:-1] + (d,)
29
- out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
30
- ops.gelu_tanh_and_mul(out, x)
31
- return out
32
-
33
-
34
- class FatreluAndMul(nn.Module):
35
- def __init__(self, threshold: float = 0.0):
36
- super().__init__()
37
- self.threshold = threshold
38
-
39
- def forward(self, x: torch.Tensor):
40
- d = x.shape[-1] // 2
41
- output_shape = x.shape[:-1] + (d,)
42
- out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
43
- ops.fatrelu_and_mul(out, x, self.threshold)
44
- return out
45
-
46
-
47
- class FastGELU(nn.Module):
48
- def forward(self, x: torch.Tensor) -> torch.Tensor:
49
- out = torch.empty_like(x)
50
- ops.gelu_fast(out, x)
51
- return out
52
-
53
-
54
- class NewGELU(nn.Module):
55
- def forward(self, x: torch.Tensor) -> torch.Tensor:
56
- out = torch.empty_like(x)
57
- ops.gelu_new(out, x)
58
- return out
59
-
60
-
61
- class QuickGELU(nn.Module):
62
- def forward(self, x: torch.Tensor) -> torch.Tensor:
63
- out = torch.empty_like(x)
64
- ops.gelu_quick(out, x)
65
- return out
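`layers.py`, also dropped for ROCm 6.2, wraps the same kernels as `nn.Module`s: the `*AndMul` layers split the last dimension in half and allocate the output before dispatching, while `FastGELU`, `NewGELU`, and `QuickGELU` are elementwise. A minimal sketch of the layer-style API, assuming a CUDA build of the package is importable:

```python
import torch
from activation.layers import SiluAndMul, QuickGELU

x = torch.randn(2, 16, 4096, device="cuda")

# Gated layer: last dimension 4096 -> 2048, per d = x.shape[-1] // 2.
gated = SiluAndMul()(x)
assert gated.shape == (2, 16, 2048)

# Elementwise layer keeps the input shape.
y = QuickGELU()(x)
assert y.shape == x.shape
```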
build/torch26-cxx98-cu118-x86_64-linux/activation/_activation_78448fa.abi3.so ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:56dcc985761e309cbef3fc2a201f26e800583128d6e5a3fc1b23800fb0b8b48c
+size 2440544
build/torch26-cxx98-cu118-x86_64-linux/activation/_activation_9e96ad7.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:91c7e17f1ac04877baf753cfd1128df6db2511827df5c9e8befc6c8593809d2f
-size 2362720
build/torch26-cxx98-cu118-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_9e96ad7
-ops = torch.ops._activation_9e96ad7
+from . import _activation_78448fa
+ops = torch.ops._activation_78448fa
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_9e96ad7::{op_name}"
+    return f"_activation_78448fa::{op_name}"
build/torch26-cxx98-cu124-x86_64-linux/activation/_activation_78448fa.abi3.so ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:03c5f08322796d0736024412babe5d7f13bb1126387976ae12a80485a40d3883
+size 2502240
build/torch26-cxx98-cu124-x86_64-linux/activation/_activation_9e96ad7.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:7449914458b3ac7c41718f18da6bd577f1b957235416f5a2ce4eda7d62b1b54c
-size 2420312
build/torch26-cxx98-cu124-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_9e96ad7
-ops = torch.ops._activation_9e96ad7
+from . import _activation_78448fa
+ops = torch.ops._activation_78448fa
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_9e96ad7::{op_name}"
+    return f"_activation_78448fa::{op_name}"
build/torch26-cxx98-cu126-x86_64-linux/activation/_activation_78448fa.abi3.so ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f6eae5c895c564fbd2524ce488f4e91e65dc63402cd41a8bc74474b7437b2e62
+size 2506784
build/torch26-cxx98-cu126-x86_64-linux/activation/_activation_9e96ad7.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:82acba03cbfc05b84f66f6a5eb451bf044a19e17d6fdea77755b56bc25f8bd58
-size 2424856
build/torch26-cxx98-cu126-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_9e96ad7
-ops = torch.ops._activation_9e96ad7
+from . import _activation_78448fa
+ops = torch.ops._activation_78448fa
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_9e96ad7::{op_name}"
+    return f"_activation_78448fa::{op_name}"
build/torch27-cxx11-cu118-x86_64-linux/activation/_activation_78448fa.abi3.so ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f8086b2d9e0f2db80385b83e0bc28f8d158725d002e1613e1a46a87732197e9f
+size 2448152
build/torch27-cxx11-cu118-x86_64-linux/activation/_activation_9e96ad7.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:5326ebdfb51c80f6b58847e320e4c22d0ab099ded2f7ed006b526753fadcb9cb
-size 2370328
build/torch27-cxx11-cu118-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_9e96ad7
-ops = torch.ops._activation_9e96ad7
+from . import _activation_78448fa
+ops = torch.ops._activation_78448fa
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_9e96ad7::{op_name}"
+    return f"_activation_78448fa::{op_name}"
build/torch27-cxx11-cu126-x86_64-linux/activation/_activation_78448fa.abi3.so ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:22ed530294eb70c8261e581615bd9da0d2dc1ba8c3f0dcc3696cff9be62580cb
+size 2518600
build/torch27-cxx11-cu126-x86_64-linux/activation/_activation_9e96ad7.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:2036ab72e0d8d0bd51785965d6d1e0eba1ef865400960f117b2d2d660b7e234d
-size 2436680
build/torch27-cxx11-cu126-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_9e96ad7
-ops = torch.ops._activation_9e96ad7
+from . import _activation_78448fa
+ops = torch.ops._activation_78448fa
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_9e96ad7::{op_name}"
+    return f"_activation_78448fa::{op_name}"
build/torch27-cxx11-cu128-x86_64-linux/activation/_activation_78448fa.abi3.so ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5d168f3ecfc9539e9a2f0af0a5f533bd958682efd1cc5bd716a964d8f1b6f679
+size 3331432
build/torch27-cxx11-cu128-x86_64-linux/activation/_activation_9e96ad7.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:a86cfd3eb440bd7af9887b3bbcbcfe884a1cc027c7ea67a34e404f5d6c8cc3cc
-size 2909528
build/torch27-cxx11-cu128-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_9e96ad7
-ops = torch.ops._activation_9e96ad7
+from . import _activation_78448fa
+ops = torch.ops._activation_78448fa
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_9e96ad7::{op_name}"
+    return f"_activation_78448fa::{op_name}"
build/torch27-cxx11-rocm63-x86_64-linux/activation/__init__.py DELETED
@@ -1,52 +0,0 @@
-import torch
-
-from ._ops import ops
-
-from . import layers
-
-
-def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.silu_and_mul(out, x)
-    return out
-
-
-def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.gelu_and_mul(out, x)
-    return out
-
-
-def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.gelu_tanh_and_mul(out, x)
-    return out
-
-
-def fatrelu_and_mul(out: torch.Tensor, x: torch.Tensor, threshold: float = 0.0) -> None:
-    ops.fatrelu_and_mul(out, x, threshold)
-    return out
-
-
-def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.gelu_fast(out, x)
-    return out
-
-
-def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.gelu_new(out, x)
-    return out
-
-
-def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.gelu_quick(out, x)
-    return out
-
-
-__all__ = [
-    "silu_and_mul",
-    "gelu_and_mul",
-    "gelu_tanh_and_mul",
-    "fatrelu_and_mul",
-    "gelu_fast",
-    "gelu_new",
-    "gelu_quick",
-    "layers",
-]
build/torch27-cxx11-rocm63-x86_64-linux/activation/_activation_9e96ad7.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:8cae5f6b61bc920954d1c65becc79d79d4087b8f53bb06d3aa6760b4b3a342ef
-size 2467896
build/torch27-cxx11-rocm63-x86_64-linux/activation/_ops.py DELETED
@@ -1,9 +0,0 @@
-import torch
-from . import _activation_9e96ad7
-ops = torch.ops._activation_9e96ad7
-
-def add_op_namespace_prefix(op_name: str):
-    """
-    Prefix op by namespace.
-    """
-    return f"_activation_9e96ad7::{op_name}"
build/torch27-cxx11-rocm63-x86_64-linux/activation/layers.py DELETED
@@ -1,65 +0,0 @@
-import torch
-import torch.nn as nn
-
-from ._ops import ops
-
-
-class SiluAndMul(nn.Module):
-    def forward(self, x: torch.Tensor):
-        d = x.shape[-1] // 2
-        output_shape = x.shape[:-1] + (d,)
-        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
-        ops.silu_and_mul(out, x)
-        return out
-
-
-class GeluAndMul(nn.Module):
-    def forward(self, x: torch.Tensor):
-        d = x.shape[-1] // 2
-        output_shape = x.shape[:-1] + (d,)
-        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
-        ops.gelu_and_mul(out, x)
-        return out
-
-
-class GeluTanhAndMul(nn.Module):
-    def forward(self, x: torch.Tensor):
-        d = x.shape[-1] // 2
-        output_shape = x.shape[:-1] + (d,)
-        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
-        ops.gelu_tanh_and_mul(out, x)
-        return out
-
-
-class FatreluAndMul(nn.Module):
-    def __init__(self, threshold: float = 0.0):
-        super().__init__()
-        self.threshold = threshold
-
-    def forward(self, x: torch.Tensor):
-        d = x.shape[-1] // 2
-        output_shape = x.shape[:-1] + (d,)
-        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
-        ops.fatrelu_and_mul(out, x, self.threshold)
-        return out
-
-
-class FastGELU(nn.Module):
-    def forward(self, x: torch.Tensor) -> torch.Tensor:
-        out = torch.empty_like(x)
-        ops.gelu_fast(out, x)
-        return out
-
-
-class NewGELU(nn.Module):
-    def forward(self, x: torch.Tensor) -> torch.Tensor:
-        out = torch.empty_like(x)
-        ops.gelu_new(out, x)
-        return out
-
-
-class QuickGELU(nn.Module):
-    def forward(self, x: torch.Tensor) -> torch.Tensor:
-        out = torch.empty_like(x)
-        ops.gelu_quick(out, x)
-        return out