danieldk (HF Staff) committed
Commit 5eb950d · 1 Parent(s): 73ba2c1

Add extension versioning

Files changed (38)
  1. build.toml +3 -0
  2. build/torch24-cxx11-cu118-x86_64-linux/activation/__init__.py +17 -8
  3. build/torch24-cxx11-cu118-x86_64-linux/activation/{_activation.abi3.so → _activation_0_0_1.abi3.so} +0 -0
  4. build/torch24-cxx11-cu118-x86_64-linux/activation/_ops.py +3 -0
  5. build/torch24-cxx11-cu121-x86_64-linux/activation/__init__.py +17 -8
  6. build/torch24-cxx11-cu121-x86_64-linux/activation/{_activation.abi3.so → _activation_0_0_1.abi3.so} +0 -0
  7. build/torch24-cxx11-cu121-x86_64-linux/activation/_ops.py +3 -0
  8. build/torch24-cxx11-cu124-x86_64-linux/activation/__init__.py +17 -8
  9. build/torch24-cxx11-cu124-x86_64-linux/activation/{_activation.abi3.so → _activation_0_0_1.abi3.so} +0 -0
  10. build/torch24-cxx11-cu124-x86_64-linux/activation/_ops.py +3 -0
  11. build/torch24-cxx98-cu118-x86_64-linux/activation/__init__.py +17 -8
  12. build/torch24-cxx98-cu118-x86_64-linux/activation/{_activation.abi3.so → _activation_0_0_1.abi3.so} +0 -0
  13. build/torch24-cxx98-cu118-x86_64-linux/activation/_ops.py +3 -0
  14. build/torch24-cxx98-cu121-x86_64-linux/activation/__init__.py +17 -8
  15. build/torch24-cxx98-cu121-x86_64-linux/activation/{_activation.abi3.so → _activation_0_0_1.abi3.so} +0 -0
  16. build/torch24-cxx98-cu121-x86_64-linux/activation/_ops.py +3 -0
  17. build/torch24-cxx98-cu124-x86_64-linux/activation/__init__.py +17 -8
  18. build/torch24-cxx98-cu124-x86_64-linux/activation/{_activation.abi3.so → _activation_0_0_1.abi3.so} +0 -0
  19. build/torch24-cxx98-cu124-x86_64-linux/activation/_ops.py +3 -0
  20. build/torch25-cxx11-cu118-x86_64-linux/activation/__init__.py +17 -8
  21. build/torch25-cxx11-cu118-x86_64-linux/activation/{_activation.abi3.so → _activation_0_0_1.abi3.so} +0 -0
  22. build/torch25-cxx11-cu118-x86_64-linux/activation/_ops.py +3 -0
  23. build/torch25-cxx11-cu121-x86_64-linux/activation/__init__.py +17 -8
  24. build/torch25-cxx11-cu121-x86_64-linux/activation/{_activation.abi3.so → _activation_0_0_1.abi3.so} +0 -0
  25. build/torch25-cxx11-cu121-x86_64-linux/activation/_ops.py +3 -0
  26. build/torch25-cxx11-cu124-x86_64-linux/activation/__init__.py +17 -8
  27. build/torch25-cxx11-cu124-x86_64-linux/activation/{_activation.abi3.so → _activation_0_0_1.abi3.so} +0 -0
  28. build/torch25-cxx11-cu124-x86_64-linux/activation/_ops.py +3 -0
  29. build/torch25-cxx98-cu118-x86_64-linux/activation/__init__.py +17 -8
  30. build/torch25-cxx98-cu118-x86_64-linux/activation/{_activation.abi3.so → _activation_0_0_1.abi3.so} +0 -0
  31. build/torch25-cxx98-cu118-x86_64-linux/activation/_ops.py +3 -0
  32. build/torch25-cxx98-cu121-x86_64-linux/activation/__init__.py +17 -8
  33. build/torch25-cxx98-cu121-x86_64-linux/activation/{_activation.abi3.so → _activation_0_0_1.abi3.so} +0 -0
  34. build/torch25-cxx98-cu121-x86_64-linux/activation/_ops.py +3 -0
  35. build/torch25-cxx98-cu124-x86_64-linux/activation/__init__.py +17 -8
  36. build/torch25-cxx98-cu124-x86_64-linux/activation/{_activation.abi3.so → _activation_0_0_1.abi3.so} +0 -0
  37. build/torch25-cxx98-cu124-x86_64-linux/activation/_ops.py +3 -0
  38. ext-torch/__init__.py +17 -8
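The renames above follow a single pattern. As a small sketch (the replace rule is an inference from the file names, not stated in the diff), the version-suffixed module and op-namespace name can be derived from build.toml like this:

name = "activation"                    # [torch] name in build.toml
version = "0.0.1"                      # [general] version added by this commit
suffix = version.replace(".", "_")
so_module = f"_{name}_{suffix}"        # -> "_activation_0_0_1"
print(so_module + ".abi3.so")          # matches the renamed shared objects below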
build.toml CHANGED
@@ -1,3 +1,6 @@
+ [general]
+ version = "0.0.1"
+
  [torch]
  name = "activation"
  src = [
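Putting the hunk together, the resulting build.toml presumably reads roughly as follows; the src entries are placeholders, since the actual list is truncated in the diff above.

[general]
version = "0.0.1"

[torch]
name = "activation"
src = [
  # ... actual source files elided in the diff above ...
]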
build/torch24-cxx11-cu118-x86_64-linux/activation/__init__.py CHANGED
@@ -1,32 +1,41 @@
  import torch

- import activation._activation
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+

  def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.silu_and_mul(out, x)
+     ops.silu_and_mul(out, x)


  def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_and_mul(out, x)
+     ops.gelu_and_mul(out, x)


  def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_tanh_and_mul(out, x)
+     ops.gelu_tanh_and_mul(out, x)


  def fatrelu_and_mul(out: torch.Tensor,
                      x: torch.Tensor,
                      threshold: float = 0.0) -> None:
-     torch.ops._activation.fatrelu_and_mul(out, x, threshold)
+     ops.fatrelu_and_mul(out, x, threshold)


  def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_fast(out, x)
+     ops.gelu_fast(out, x)


  def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_new(out, x)
+     ops.gelu_new(out, x)


  def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_quick(out, x)
+     ops.gelu_quick(out, x)
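For orientation, a usage sketch of the Python API above. The in-place convention (for the *_and_mul ops, out holds half of x's trailing dimension) and the CUDA device are assumptions drawn from this kernel family, not stated in the diff.

import torch
import activation

# Assumed shapes: silu_and_mul reads gate/up halves from x's last dimension and
# writes the activated product into out (out.shape[-1] == x.shape[-1] // 2).
x = torch.randn(8, 2 * 128, device="cuda", dtype=torch.float16)
out = torch.empty(8, 128, device="cuda", dtype=torch.float16)
activation.silu_and_mul(out, x)

# Plain activations are elementwise; out matches x's shape (assumed).
y = torch.randn(8, 128, device="cuda", dtype=torch.float16)
out2 = torch.empty_like(y)
activation.gelu_fast(out2, y)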
build/torch24-cxx11-cu118-x86_64-linux/activation/{_activation.abi3.so → _activation_0_0_1.abi3.so} RENAMED
Binary files a/build/torch24-cxx11-cu118-x86_64-linux/activation/_activation.abi3.so and b/build/torch24-cxx11-cu118-x86_64-linux/activation/_activation_0_0_1.abi3.so differ
 
build/torch24-cxx11-cu118-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,3 @@
+ import torch
+ from . import _activation_0_0_1
+ ops = torch.ops._activation_0_0_1
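The same generated _ops.py is added to every build variant below. Under this scheme, a later release would presumably only swap the suffix; the hypothetical 0.0.2 counterpart below is an illustration, not part of this commit.

import torch
from . import _activation_0_0_2      # hypothetical future shared object
ops = torch.ops._activation_0_0_2    # callers in __init__.py keep using `ops`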
build/torch24-cxx11-cu121-x86_64-linux/activation/__init__.py CHANGED
@@ -1,32 +1,41 @@
  import torch

- import activation._activation
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+

  def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.silu_and_mul(out, x)
+     ops.silu_and_mul(out, x)


  def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_and_mul(out, x)
+     ops.gelu_and_mul(out, x)


  def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_tanh_and_mul(out, x)
+     ops.gelu_tanh_and_mul(out, x)


  def fatrelu_and_mul(out: torch.Tensor,
                      x: torch.Tensor,
                      threshold: float = 0.0) -> None:
-     torch.ops._activation.fatrelu_and_mul(out, x, threshold)
+     ops.fatrelu_and_mul(out, x, threshold)


  def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_fast(out, x)
+     ops.gelu_fast(out, x)


  def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_new(out, x)
+     ops.gelu_new(out, x)


  def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_quick(out, x)
+     ops.gelu_quick(out, x)
build/torch24-cxx11-cu121-x86_64-linux/activation/{_activation.abi3.so β†’ _activation_0_0_1.abi3.so} RENAMED
Binary files a/build/torch24-cxx11-cu121-x86_64-linux/activation/_activation.abi3.so and b/build/torch24-cxx11-cu121-x86_64-linux/activation/_activation_0_0_1.abi3.so differ
 
build/torch24-cxx11-cu121-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,3 @@
+ import torch
+ from . import _activation_0_0_1
+ ops = torch.ops._activation_0_0_1
build/torch24-cxx11-cu124-x86_64-linux/activation/__init__.py CHANGED
@@ -1,32 +1,41 @@
  import torch

- import activation._activation
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+

  def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.silu_and_mul(out, x)
+     ops.silu_and_mul(out, x)


  def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_and_mul(out, x)
+     ops.gelu_and_mul(out, x)


  def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_tanh_and_mul(out, x)
+     ops.gelu_tanh_and_mul(out, x)


  def fatrelu_and_mul(out: torch.Tensor,
                      x: torch.Tensor,
                      threshold: float = 0.0) -> None:
-     torch.ops._activation.fatrelu_and_mul(out, x, threshold)
+     ops.fatrelu_and_mul(out, x, threshold)


  def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_fast(out, x)
+     ops.gelu_fast(out, x)


  def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_new(out, x)
+     ops.gelu_new(out, x)


  def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_quick(out, x)
+     ops.gelu_quick(out, x)
build/torch24-cxx11-cu124-x86_64-linux/activation/{_activation.abi3.so → _activation_0_0_1.abi3.so} RENAMED
Binary files a/build/torch24-cxx11-cu124-x86_64-linux/activation/_activation.abi3.so and b/build/torch24-cxx11-cu124-x86_64-linux/activation/_activation_0_0_1.abi3.so differ
 
build/torch24-cxx11-cu124-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,3 @@
+ import torch
+ from . import _activation_0_0_1
+ ops = torch.ops._activation_0_0_1
build/torch24-cxx98-cu118-x86_64-linux/activation/__init__.py CHANGED
@@ -1,32 +1,41 @@
  import torch

- import activation._activation
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+

  def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.silu_and_mul(out, x)
+     ops.silu_and_mul(out, x)


  def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_and_mul(out, x)
+     ops.gelu_and_mul(out, x)


  def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_tanh_and_mul(out, x)
+     ops.gelu_tanh_and_mul(out, x)


  def fatrelu_and_mul(out: torch.Tensor,
                      x: torch.Tensor,
                      threshold: float = 0.0) -> None:
-     torch.ops._activation.fatrelu_and_mul(out, x, threshold)
+     ops.fatrelu_and_mul(out, x, threshold)


  def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_fast(out, x)
+     ops.gelu_fast(out, x)


  def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_new(out, x)
+     ops.gelu_new(out, x)


  def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_quick(out, x)
+     ops.gelu_quick(out, x)
build/torch24-cxx98-cu118-x86_64-linux/activation/{_activation.abi3.so → _activation_0_0_1.abi3.so} RENAMED
Binary files a/build/torch24-cxx98-cu118-x86_64-linux/activation/_activation.abi3.so and b/build/torch24-cxx98-cu118-x86_64-linux/activation/_activation_0_0_1.abi3.so differ
 
build/torch24-cxx98-cu118-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,3 @@
+ import torch
+ from . import _activation_0_0_1
+ ops = torch.ops._activation_0_0_1
build/torch24-cxx98-cu121-x86_64-linux/activation/__init__.py CHANGED
@@ -1,32 +1,41 @@
  import torch

- import activation._activation
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+

  def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.silu_and_mul(out, x)
+     ops.silu_and_mul(out, x)


  def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_and_mul(out, x)
+     ops.gelu_and_mul(out, x)


  def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_tanh_and_mul(out, x)
+     ops.gelu_tanh_and_mul(out, x)


  def fatrelu_and_mul(out: torch.Tensor,
                      x: torch.Tensor,
                      threshold: float = 0.0) -> None:
-     torch.ops._activation.fatrelu_and_mul(out, x, threshold)
+     ops.fatrelu_and_mul(out, x, threshold)


  def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_fast(out, x)
+     ops.gelu_fast(out, x)


  def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_new(out, x)
+     ops.gelu_new(out, x)


  def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_quick(out, x)
+     ops.gelu_quick(out, x)
build/torch24-cxx98-cu121-x86_64-linux/activation/{_activation.abi3.so → _activation_0_0_1.abi3.so} RENAMED
Binary files a/build/torch24-cxx98-cu121-x86_64-linux/activation/_activation.abi3.so and b/build/torch24-cxx98-cu121-x86_64-linux/activation/_activation_0_0_1.abi3.so differ
 
build/torch24-cxx98-cu121-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,3 @@
+ import torch
+ from . import _activation_0_0_1
+ ops = torch.ops._activation_0_0_1
build/torch24-cxx98-cu124-x86_64-linux/activation/__init__.py CHANGED
@@ -1,32 +1,41 @@
  import torch

- import activation._activation
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+

  def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.silu_and_mul(out, x)
+     ops.silu_and_mul(out, x)


  def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_and_mul(out, x)
+     ops.gelu_and_mul(out, x)


  def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_tanh_and_mul(out, x)
+     ops.gelu_tanh_and_mul(out, x)


  def fatrelu_and_mul(out: torch.Tensor,
                      x: torch.Tensor,
                      threshold: float = 0.0) -> None:
-     torch.ops._activation.fatrelu_and_mul(out, x, threshold)
+     ops.fatrelu_and_mul(out, x, threshold)


  def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_fast(out, x)
+     ops.gelu_fast(out, x)


  def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_new(out, x)
+     ops.gelu_new(out, x)


  def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_quick(out, x)
+     ops.gelu_quick(out, x)
build/torch24-cxx98-cu124-x86_64-linux/activation/{_activation.abi3.so → _activation_0_0_1.abi3.so} RENAMED
Binary files a/build/torch24-cxx98-cu124-x86_64-linux/activation/_activation.abi3.so and b/build/torch24-cxx98-cu124-x86_64-linux/activation/_activation_0_0_1.abi3.so differ
 
build/torch24-cxx98-cu124-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,3 @@
+ import torch
+ from . import _activation_0_0_1
+ ops = torch.ops._activation_0_0_1
build/torch25-cxx11-cu118-x86_64-linux/activation/__init__.py CHANGED
@@ -1,32 +1,41 @@
  import torch

- import activation._activation
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+

  def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.silu_and_mul(out, x)
+     ops.silu_and_mul(out, x)


  def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_and_mul(out, x)
+     ops.gelu_and_mul(out, x)


  def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_tanh_and_mul(out, x)
+     ops.gelu_tanh_and_mul(out, x)


  def fatrelu_and_mul(out: torch.Tensor,
                      x: torch.Tensor,
                      threshold: float = 0.0) -> None:
-     torch.ops._activation.fatrelu_and_mul(out, x, threshold)
+     ops.fatrelu_and_mul(out, x, threshold)


  def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_fast(out, x)
+     ops.gelu_fast(out, x)


  def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_new(out, x)
+     ops.gelu_new(out, x)


  def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_quick(out, x)
+     ops.gelu_quick(out, x)
build/torch25-cxx11-cu118-x86_64-linux/activation/{_activation.abi3.so → _activation_0_0_1.abi3.so} RENAMED
Binary files a/build/torch25-cxx11-cu118-x86_64-linux/activation/_activation.abi3.so and b/build/torch25-cxx11-cu118-x86_64-linux/activation/_activation_0_0_1.abi3.so differ
 
build/torch25-cxx11-cu118-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,3 @@
+ import torch
+ from . import _activation_0_0_1
+ ops = torch.ops._activation_0_0_1
build/torch25-cxx11-cu121-x86_64-linux/activation/__init__.py CHANGED
@@ -1,32 +1,41 @@
  import torch

- import activation._activation
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+

  def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.silu_and_mul(out, x)
+     ops.silu_and_mul(out, x)


  def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_and_mul(out, x)
+     ops.gelu_and_mul(out, x)


  def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_tanh_and_mul(out, x)
+     ops.gelu_tanh_and_mul(out, x)


  def fatrelu_and_mul(out: torch.Tensor,
                      x: torch.Tensor,
                      threshold: float = 0.0) -> None:
-     torch.ops._activation.fatrelu_and_mul(out, x, threshold)
+     ops.fatrelu_and_mul(out, x, threshold)


  def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_fast(out, x)
+     ops.gelu_fast(out, x)


  def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_new(out, x)
+     ops.gelu_new(out, x)


  def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_quick(out, x)
+     ops.gelu_quick(out, x)
build/torch25-cxx11-cu121-x86_64-linux/activation/{_activation.abi3.so → _activation_0_0_1.abi3.so} RENAMED
Binary files a/build/torch25-cxx11-cu121-x86_64-linux/activation/_activation.abi3.so and b/build/torch25-cxx11-cu121-x86_64-linux/activation/_activation_0_0_1.abi3.so differ
 
build/torch25-cxx11-cu121-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,3 @@
+ import torch
+ from . import _activation_0_0_1
+ ops = torch.ops._activation_0_0_1
build/torch25-cxx11-cu124-x86_64-linux/activation/__init__.py CHANGED
@@ -1,32 +1,41 @@
  import torch

- import activation._activation
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+

  def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.silu_and_mul(out, x)
+     ops.silu_and_mul(out, x)


  def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_and_mul(out, x)
+     ops.gelu_and_mul(out, x)


  def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_tanh_and_mul(out, x)
+     ops.gelu_tanh_and_mul(out, x)


  def fatrelu_and_mul(out: torch.Tensor,
                      x: torch.Tensor,
                      threshold: float = 0.0) -> None:
-     torch.ops._activation.fatrelu_and_mul(out, x, threshold)
+     ops.fatrelu_and_mul(out, x, threshold)


  def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_fast(out, x)
+     ops.gelu_fast(out, x)


  def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_new(out, x)
+     ops.gelu_new(out, x)


  def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_quick(out, x)
+     ops.gelu_quick(out, x)
build/torch25-cxx11-cu124-x86_64-linux/activation/{_activation.abi3.so → _activation_0_0_1.abi3.so} RENAMED
Binary files a/build/torch25-cxx11-cu124-x86_64-linux/activation/_activation.abi3.so and b/build/torch25-cxx11-cu124-x86_64-linux/activation/_activation_0_0_1.abi3.so differ
 
build/torch25-cxx11-cu124-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,3 @@
+ import torch
+ from . import _activation_0_0_1
+ ops = torch.ops._activation_0_0_1
build/torch25-cxx98-cu118-x86_64-linux/activation/__init__.py CHANGED
@@ -1,32 +1,41 @@
  import torch

- import activation._activation
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+

  def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.silu_and_mul(out, x)
+     ops.silu_and_mul(out, x)


  def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_and_mul(out, x)
+     ops.gelu_and_mul(out, x)


  def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_tanh_and_mul(out, x)
+     ops.gelu_tanh_and_mul(out, x)


  def fatrelu_and_mul(out: torch.Tensor,
                      x: torch.Tensor,
                      threshold: float = 0.0) -> None:
-     torch.ops._activation.fatrelu_and_mul(out, x, threshold)
+     ops.fatrelu_and_mul(out, x, threshold)


  def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_fast(out, x)
+     ops.gelu_fast(out, x)


  def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_new(out, x)
+     ops.gelu_new(out, x)


  def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_quick(out, x)
+     ops.gelu_quick(out, x)
build/torch25-cxx98-cu118-x86_64-linux/activation/{_activation.abi3.so → _activation_0_0_1.abi3.so} RENAMED
Binary files a/build/torch25-cxx98-cu118-x86_64-linux/activation/_activation.abi3.so and b/build/torch25-cxx98-cu118-x86_64-linux/activation/_activation_0_0_1.abi3.so differ
 
build/torch25-cxx98-cu118-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,3 @@
+ import torch
+ from . import _activation_0_0_1
+ ops = torch.ops._activation_0_0_1
build/torch25-cxx98-cu121-x86_64-linux/activation/__init__.py CHANGED
@@ -1,32 +1,41 @@
  import torch

- import activation._activation
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+

  def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.silu_and_mul(out, x)
+     ops.silu_and_mul(out, x)


  def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_and_mul(out, x)
+     ops.gelu_and_mul(out, x)


  def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_tanh_and_mul(out, x)
+     ops.gelu_tanh_and_mul(out, x)


  def fatrelu_and_mul(out: torch.Tensor,
                      x: torch.Tensor,
                      threshold: float = 0.0) -> None:
-     torch.ops._activation.fatrelu_and_mul(out, x, threshold)
+     ops.fatrelu_and_mul(out, x, threshold)


  def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_fast(out, x)
+     ops.gelu_fast(out, x)


  def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_new(out, x)
+     ops.gelu_new(out, x)


  def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_quick(out, x)
+     ops.gelu_quick(out, x)
build/torch25-cxx98-cu121-x86_64-linux/activation/{_activation.abi3.so → _activation_0_0_1.abi3.so} RENAMED
Binary files a/build/torch25-cxx98-cu121-x86_64-linux/activation/_activation.abi3.so and b/build/torch25-cxx98-cu121-x86_64-linux/activation/_activation_0_0_1.abi3.so differ
 
build/torch25-cxx98-cu121-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,3 @@
+ import torch
+ from . import _activation_0_0_1
+ ops = torch.ops._activation_0_0_1
build/torch25-cxx98-cu124-x86_64-linux/activation/__init__.py CHANGED
@@ -1,32 +1,41 @@
  import torch

- import activation._activation
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+

  def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.silu_and_mul(out, x)
+     ops.silu_and_mul(out, x)


  def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_and_mul(out, x)
+     ops.gelu_and_mul(out, x)


  def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_tanh_and_mul(out, x)
+     ops.gelu_tanh_and_mul(out, x)


  def fatrelu_and_mul(out: torch.Tensor,
                      x: torch.Tensor,
                      threshold: float = 0.0) -> None:
-     torch.ops._activation.fatrelu_and_mul(out, x, threshold)
+     ops.fatrelu_and_mul(out, x, threshold)


  def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_fast(out, x)
+     ops.gelu_fast(out, x)


  def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_new(out, x)
+     ops.gelu_new(out, x)


  def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_quick(out, x)
+     ops.gelu_quick(out, x)
build/torch25-cxx98-cu124-x86_64-linux/activation/{_activation.abi3.so → _activation_0_0_1.abi3.so} RENAMED
Binary files a/build/torch25-cxx98-cu124-x86_64-linux/activation/_activation.abi3.so and b/build/torch25-cxx98-cu124-x86_64-linux/activation/_activation_0_0_1.abi3.so differ
 
build/torch25-cxx98-cu124-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,3 @@
+ import torch
+ from . import _activation_0_0_1
+ ops = torch.ops._activation_0_0_1
ext-torch/__init__.py CHANGED
@@ -1,32 +1,41 @@
  import torch

- import activation._activation
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+

  def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.silu_and_mul(out, x)
+     ops.silu_and_mul(out, x)


  def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_and_mul(out, x)
+     ops.gelu_and_mul(out, x)


  def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_tanh_and_mul(out, x)
+     ops.gelu_tanh_and_mul(out, x)


  def fatrelu_and_mul(out: torch.Tensor,
                      x: torch.Tensor,
                      threshold: float = 0.0) -> None:
-     torch.ops._activation.fatrelu_and_mul(out, x, threshold)
+     ops.fatrelu_and_mul(out, x, threshold)


  def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_fast(out, x)
+     ops.gelu_fast(out, x)


  def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_new(out, x)
+     ops.gelu_new(out, x)


  def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_quick(out, x)
+     ops.gelu_quick(out, x)
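Finally, a rough sketch of the local-development fallback exercised by the except branch in ext-torch/__init__.py above: when the generated _ops module is absent, an unversioned _activation extension found on sys.path is imported and its ops are taken from the unversioned namespace. The build path below is hypothetical.

import sys
sys.path.insert(0, "build/lib")      # hypothetical location of a locally built _activation extension

import torch
import _activation                   # importing the extension registers its ops
ops = torch.ops._activation          # same binding the fallback branch creates
ops.gelu_quick                       # resolves once the extension is loaded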