danieldk (HF Staff) committed
Commit dee1261 · 1 Parent(s): af4a488

Add extension versioning

Files changed (46)
  1. build.toml +3 -0
  2. build/torch24-cxx11-cu118-x86_64-linux/activation/__init__.py +17 -8
  3. build/torch24-cxx11-cu118-x86_64-linux/activation/{_activation.abi3.so → _activation_0_0_1.abi3.so} +2 -2
  4. build/torch24-cxx11-cu118-x86_64-linux/activation/_ops.py +3 -0
  5. build/torch24-cxx11-cu121-x86_64-linux/activation/__init__.py +17 -8
  6. build/torch24-cxx11-cu121-x86_64-linux/activation/{_activation.abi3.so → _activation_0_0_1.abi3.so} +2 -2
  7. build/torch24-cxx11-cu121-x86_64-linux/activation/_ops.py +3 -0
  8. build/torch24-cxx11-cu124-x86_64-linux/activation/__init__.py +17 -8
  9. build/torch24-cxx11-cu124-x86_64-linux/activation/{_activation.abi3.so → _activation_0_0_1.abi3.so} +2 -2
  10. build/torch24-cxx11-cu124-x86_64-linux/activation/_ops.py +3 -0
  11. build/torch24-cxx98-cu118-x86_64-linux/activation/__init__.py +17 -8
  12. build/torch24-cxx98-cu118-x86_64-linux/activation/{_activation.abi3.so → _activation_0_0_1.abi3.so} +2 -2
  13. build/torch24-cxx98-cu118-x86_64-linux/activation/_ops.py +3 -0
  14. build/torch24-cxx98-cu121-x86_64-linux/activation/__init__.py +17 -8
  15. build/torch24-cxx98-cu121-x86_64-linux/activation/_activation.abi3.so +0 -3
  16. build/torch24-cxx98-cu121-x86_64-linux/activation/_activation_0_0_1.abi3.so +3 -0
  17. build/torch24-cxx98-cu121-x86_64-linux/activation/_ops.py +3 -0
  18. build/torch24-cxx98-cu124-x86_64-linux/activation/__init__.py +17 -8
  19. build/torch24-cxx98-cu124-x86_64-linux/activation/_activation.abi3.so +0 -3
  20. build/torch24-cxx98-cu124-x86_64-linux/activation/_activation_0_0_1.abi3.so +3 -0
  21. build/torch24-cxx98-cu124-x86_64-linux/activation/_ops.py +3 -0
  22. build/torch25-cxx11-cu118-x86_64-linux/activation/__init__.py +17 -8
  23. build/torch25-cxx11-cu118-x86_64-linux/activation/_activation.abi3.so +0 -3
  24. build/torch25-cxx11-cu118-x86_64-linux/activation/_activation_0_0_1.abi3.so +3 -0
  25. build/torch25-cxx11-cu118-x86_64-linux/activation/_ops.py +3 -0
  26. build/torch25-cxx11-cu121-x86_64-linux/activation/__init__.py +17 -8
  27. build/torch25-cxx11-cu121-x86_64-linux/activation/_activation.abi3.so +0 -3
  28. build/torch25-cxx11-cu121-x86_64-linux/activation/_activation_0_0_1.abi3.so +3 -0
  29. build/torch25-cxx11-cu121-x86_64-linux/activation/_ops.py +3 -0
  30. build/torch25-cxx11-cu124-x86_64-linux/activation/__init__.py +17 -8
  31. build/torch25-cxx11-cu124-x86_64-linux/activation/_activation.abi3.so +0 -3
  32. build/torch25-cxx11-cu124-x86_64-linux/activation/_activation_0_0_1.abi3.so +3 -0
  33. build/torch25-cxx11-cu124-x86_64-linux/activation/_ops.py +3 -0
  34. build/torch25-cxx98-cu118-x86_64-linux/activation/__init__.py +17 -8
  35. build/torch25-cxx98-cu118-x86_64-linux/activation/_activation.abi3.so +0 -3
  36. build/torch25-cxx98-cu118-x86_64-linux/activation/_activation_0_0_1.abi3.so +3 -0
  37. build/torch25-cxx98-cu118-x86_64-linux/activation/_ops.py +3 -0
  38. build/torch25-cxx98-cu121-x86_64-linux/activation/__init__.py +17 -8
  39. build/torch25-cxx98-cu121-x86_64-linux/activation/_activation.abi3.so +0 -3
  40. build/torch25-cxx98-cu121-x86_64-linux/activation/_activation_0_0_1.abi3.so +3 -0
  41. build/torch25-cxx98-cu121-x86_64-linux/activation/_ops.py +3 -0
  42. build/torch25-cxx98-cu124-x86_64-linux/activation/__init__.py +17 -8
  43. build/torch25-cxx98-cu124-x86_64-linux/activation/_activation.abi3.so +0 -3
  44. build/torch25-cxx98-cu124-x86_64-linux/activation/_activation_0_0_1.abi3.so +3 -0
  45. build/torch25-cxx98-cu124-x86_64-linux/activation/_ops.py +3 -0
  46. ext-torch/__init__.py +17 -8
build.toml CHANGED
@@ -1,3 +1,6 @@
+ [general]
+ version = "0.0.1"
+
  [torch]
  name = "activation"
  src = [
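The new [general] version field is what produces the "_0_0_1" suffix on the renamed shared objects and the generated _ops.py files below. A minimal sketch of that naming rule, assuming the builder simply joins the extension name with the underscore-separated version (the helper below is hypothetical, not part of the build tooling):

def versioned_ops_name(name: str, version: str) -> str:
    # ("activation", "0.0.1") -> "_activation_0_0_1", matching the new
    # _activation_0_0_1.abi3.so files and the torch.ops namespace they register.
    return "_" + name + "_" + version.replace(".", "_")

assert versioned_ops_name("activation", "0.0.1") == "_activation_0_0_1"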
build/torch24-cxx11-cu118-x86_64-linux/activation/__init__.py CHANGED
@@ -1,32 +1,41 @@
  import torch

- import activation._activation
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+

  def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.silu_and_mul(out, x)
+     ops.silu_and_mul(out, x)


  def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_and_mul(out, x)
+     ops.gelu_and_mul(out, x)


  def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_tanh_and_mul(out, x)
+     ops.gelu_tanh_and_mul(out, x)


  def fatrelu_and_mul(out: torch.Tensor,
                      x: torch.Tensor,
                      threshold: float = 0.0) -> None:
-     torch.ops._activation.fatrelu_and_mul(out, x, threshold)
+     ops.fatrelu_and_mul(out, x, threshold)


  def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_fast(out, x)
+     ops.gelu_fast(out, x)


  def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_new(out, x)
+     ops.gelu_new(out, x)


  def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_quick(out, x)
+     ops.gelu_quick(out, x)
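For reference, a minimal usage sketch of the wrappers above, assuming one of these prebuilt activation packages is importable and a CUDA device is available; the shape convention (the input's last dimension is twice the output's, as in the vLLM-style fused kernels these wrap) is an assumption, not something stated in this diff:

import torch
import activation

# Concatenated gate/up halves in, fused SiLU-and-multiply result out (written in place).
x = torch.randn(8, 2 * 128, device="cuda", dtype=torch.float16)
out = torch.empty(8, 128, device="cuda", dtype=torch.float16)
activation.silu_and_mul(out, x)  # dispatches to torch.ops._activation_0_0_1.silu_and_mul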
build/torch24-cxx11-cu118-x86_64-linux/activation/{_activation.abi3.so → _activation_0_0_1.abi3.so} RENAMED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:c2c0bafa8637ae28052747a180d880eade588382d690c39d37e8a86a6f399b58
- size 775048
+ oid sha256:75cc4d6e78b8c6de3cd5dca2e8c0e66deebe84756cd5e46697f1194d4b7824e8
+ size 775080
build/torch24-cxx11-cu118-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,3 @@
+ import torch
+ from . import _activation_0_0_1
+ ops = torch.ops._activation_0_0_1
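The generated _ops.py gives callers a stable ops alias so that nothing outside this file hard-codes the version suffix. A sketch of the intended call pattern under that assumption:

from activation._ops import ops  # resolves to torch.ops._activation_0_0_1 in this build

# Call sites such as ops.gelu_fast(out, x) keep working when the extension is
# rebuilt under a new version suffix and _ops.py is regenerated to import it.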
build/torch24-cxx11-cu121-x86_64-linux/activation/__init__.py CHANGED
@@ -1,32 +1,41 @@
  import torch

- import activation._activation
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+

  def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.silu_and_mul(out, x)
+     ops.silu_and_mul(out, x)


  def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_and_mul(out, x)
+     ops.gelu_and_mul(out, x)


  def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_tanh_and_mul(out, x)
+     ops.gelu_tanh_and_mul(out, x)


  def fatrelu_and_mul(out: torch.Tensor,
                      x: torch.Tensor,
                      threshold: float = 0.0) -> None:
-     torch.ops._activation.fatrelu_and_mul(out, x, threshold)
+     ops.fatrelu_and_mul(out, x, threshold)


  def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_fast(out, x)
+     ops.gelu_fast(out, x)


  def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_new(out, x)
+     ops.gelu_new(out, x)


  def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_quick(out, x)
+     ops.gelu_quick(out, x)
build/torch24-cxx11-cu121-x86_64-linux/activation/{_activation.abi3.so → _activation_0_0_1.abi3.so} RENAMED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:f1b3ac478c39e2e7b7b8baf65bb3fbf1fb6aa577e83fa10b83a7f0492edefabc
- size 771184
+ oid sha256:54cd203d14bd0aecb50741dce4b56b9c4d828d357956d5a019fcc8af082eabe4
+ size 771216
build/torch24-cxx11-cu121-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,3 @@
+ import torch
+ from . import _activation_0_0_1
+ ops = torch.ops._activation_0_0_1
build/torch24-cxx11-cu124-x86_64-linux/activation/__init__.py CHANGED
@@ -1,32 +1,41 @@
  import torch

- import activation._activation
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+

  def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.silu_and_mul(out, x)
+     ops.silu_and_mul(out, x)


  def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_and_mul(out, x)
+     ops.gelu_and_mul(out, x)


  def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_tanh_and_mul(out, x)
+     ops.gelu_tanh_and_mul(out, x)


  def fatrelu_and_mul(out: torch.Tensor,
                      x: torch.Tensor,
                      threshold: float = 0.0) -> None:
-     torch.ops._activation.fatrelu_and_mul(out, x, threshold)
+     ops.fatrelu_and_mul(out, x, threshold)


  def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_fast(out, x)
+     ops.gelu_fast(out, x)


  def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_new(out, x)
+     ops.gelu_new(out, x)


  def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_quick(out, x)
+     ops.gelu_quick(out, x)
build/torch24-cxx11-cu124-x86_64-linux/activation/{_activation.abi3.so → _activation_0_0_1.abi3.so} RENAMED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:70c414b395dd4e9c0d69e9d7c2b6f836b18ac3efe6275a44ccd4ea72450b9df9
- size 807968
+ oid sha256:5d9be0c307589972018be48e8d3bf658f07ce6e51482a3b6ed89310af86250ed
+ size 808000
build/torch24-cxx11-cu124-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,3 @@
+ import torch
+ from . import _activation_0_0_1
+ ops = torch.ops._activation_0_0_1
build/torch24-cxx98-cu118-x86_64-linux/activation/__init__.py CHANGED
@@ -1,32 +1,41 @@
  import torch

- import activation._activation
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+

  def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.silu_and_mul(out, x)
+     ops.silu_and_mul(out, x)


  def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_and_mul(out, x)
+     ops.gelu_and_mul(out, x)


  def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_tanh_and_mul(out, x)
+     ops.gelu_tanh_and_mul(out, x)


  def fatrelu_and_mul(out: torch.Tensor,
                      x: torch.Tensor,
                      threshold: float = 0.0) -> None:
-     torch.ops._activation.fatrelu_and_mul(out, x, threshold)
+     ops.fatrelu_and_mul(out, x, threshold)


  def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_fast(out, x)
+     ops.gelu_fast(out, x)


  def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_new(out, x)
+     ops.gelu_new(out, x)


  def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_quick(out, x)
+     ops.gelu_quick(out, x)
build/torch24-cxx98-cu118-x86_64-linux/activation/{_activation.abi3.so → _activation_0_0_1.abi3.so} RENAMED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:62c78c86c1642c2dcd14801c116a2ee16d460504f37168ced78d59a9e72513bb
- size 761456
+ oid sha256:23cb6373e29f7bb4171bae06dd294876681c7f7bb07315aa0a1d2b8b4583cda3
+ size 761480
build/torch24-cxx98-cu118-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,3 @@
+ import torch
+ from . import _activation_0_0_1
+ ops = torch.ops._activation_0_0_1
build/torch24-cxx98-cu121-x86_64-linux/activation/__init__.py CHANGED
@@ -1,32 +1,41 @@
  import torch

- import activation._activation
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+

  def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.silu_and_mul(out, x)
+     ops.silu_and_mul(out, x)


  def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_and_mul(out, x)
+     ops.gelu_and_mul(out, x)


  def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_tanh_and_mul(out, x)
+     ops.gelu_tanh_and_mul(out, x)


  def fatrelu_and_mul(out: torch.Tensor,
                      x: torch.Tensor,
                      threshold: float = 0.0) -> None:
-     torch.ops._activation.fatrelu_and_mul(out, x, threshold)
+     ops.fatrelu_and_mul(out, x, threshold)


  def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_fast(out, x)
+     ops.gelu_fast(out, x)


  def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_new(out, x)
+     ops.gelu_new(out, x)


  def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_quick(out, x)
+     ops.gelu_quick(out, x)
build/torch24-cxx98-cu121-x86_64-linux/activation/_activation.abi3.so DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:3df5b3748a14214c1c86100f14f14e6125efada9079955dc0d65d7575a6debb6
- size 757400
build/torch24-cxx98-cu121-x86_64-linux/activation/_activation_0_0_1.abi3.so ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ad1a5445296703479f3201e099259cc4aa4459f8ae5117f8235dc1636582dcc2
+ size 757424
build/torch24-cxx98-cu121-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,3 @@
+ import torch
+ from . import _activation_0_0_1
+ ops = torch.ops._activation_0_0_1
build/torch24-cxx98-cu124-x86_64-linux/activation/__init__.py CHANGED
@@ -1,32 +1,41 @@
  import torch

- import activation._activation
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+

  def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.silu_and_mul(out, x)
+     ops.silu_and_mul(out, x)


  def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_and_mul(out, x)
+     ops.gelu_and_mul(out, x)


  def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_tanh_and_mul(out, x)
+     ops.gelu_tanh_and_mul(out, x)


  def fatrelu_and_mul(out: torch.Tensor,
                      x: torch.Tensor,
                      threshold: float = 0.0) -> None:
-     torch.ops._activation.fatrelu_and_mul(out, x, threshold)
+     ops.fatrelu_and_mul(out, x, threshold)


  def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_fast(out, x)
+     ops.gelu_fast(out, x)


  def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_new(out, x)
+     ops.gelu_new(out, x)


  def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_quick(out, x)
+     ops.gelu_quick(out, x)
build/torch24-cxx98-cu124-x86_64-linux/activation/_activation.abi3.so DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:d3c9e874d518d522c6fb3106515376eaa7151bdbba9b029f3c70448a65d6895d
- size 798360
build/torch24-cxx98-cu124-x86_64-linux/activation/_activation_0_0_1.abi3.so ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:09e025b6b8a301e237947cb7c35be59511be31d3bdbfa63be0d7480490551d7b
+ size 798384
build/torch24-cxx98-cu124-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,3 @@
+ import torch
+ from . import _activation_0_0_1
+ ops = torch.ops._activation_0_0_1
build/torch25-cxx11-cu118-x86_64-linux/activation/__init__.py CHANGED
@@ -1,32 +1,41 @@
  import torch

- import activation._activation
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+

  def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.silu_and_mul(out, x)
+     ops.silu_and_mul(out, x)


  def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_and_mul(out, x)
+     ops.gelu_and_mul(out, x)


  def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_tanh_and_mul(out, x)
+     ops.gelu_tanh_and_mul(out, x)


  def fatrelu_and_mul(out: torch.Tensor,
                      x: torch.Tensor,
                      threshold: float = 0.0) -> None:
-     torch.ops._activation.fatrelu_and_mul(out, x, threshold)
+     ops.fatrelu_and_mul(out, x, threshold)


  def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_fast(out, x)
+     ops.gelu_fast(out, x)


  def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_new(out, x)
+     ops.gelu_new(out, x)


  def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_quick(out, x)
+     ops.gelu_quick(out, x)
build/torch25-cxx11-cu118-x86_64-linux/activation/_activation.abi3.so DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:8e7b40d071f2fdb422d1caee712e5d0328c3cd85dc17695923fc11885a397f75
- size 775048
build/torch25-cxx11-cu118-x86_64-linux/activation/_activation_0_0_1.abi3.so ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2004d875a8fcaa64d3324e427b908abc8d339cffc38b9fabdb5666dac435fbfe
+ size 775080
build/torch25-cxx11-cu118-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,3 @@
+ import torch
+ from . import _activation_0_0_1
+ ops = torch.ops._activation_0_0_1
build/torch25-cxx11-cu121-x86_64-linux/activation/__init__.py CHANGED
@@ -1,32 +1,41 @@
  import torch

- import activation._activation
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+

  def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.silu_and_mul(out, x)
+     ops.silu_and_mul(out, x)


  def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_and_mul(out, x)
+     ops.gelu_and_mul(out, x)


  def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_tanh_and_mul(out, x)
+     ops.gelu_tanh_and_mul(out, x)


  def fatrelu_and_mul(out: torch.Tensor,
                      x: torch.Tensor,
                      threshold: float = 0.0) -> None:
-     torch.ops._activation.fatrelu_and_mul(out, x, threshold)
+     ops.fatrelu_and_mul(out, x, threshold)


  def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_fast(out, x)
+     ops.gelu_fast(out, x)


  def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_new(out, x)
+     ops.gelu_new(out, x)


  def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_quick(out, x)
+     ops.gelu_quick(out, x)
build/torch25-cxx11-cu121-x86_64-linux/activation/_activation.abi3.so DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:865e5b0255dbc6566459444b9f2325f9f54e568213476a66992314ebb1a47e1d
- size 771184
build/torch25-cxx11-cu121-x86_64-linux/activation/_activation_0_0_1.abi3.so ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cd24afaeecddebb5cfb6e71ba7e30d5ec3588e74928917935b36ef4be0af37c6
+ size 771216
build/torch25-cxx11-cu121-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,3 @@
+ import torch
+ from . import _activation_0_0_1
+ ops = torch.ops._activation_0_0_1
build/torch25-cxx11-cu124-x86_64-linux/activation/__init__.py CHANGED
@@ -1,32 +1,41 @@
  import torch

- import activation._activation
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+

  def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.silu_and_mul(out, x)
+     ops.silu_and_mul(out, x)


  def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_and_mul(out, x)
+     ops.gelu_and_mul(out, x)


  def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_tanh_and_mul(out, x)
+     ops.gelu_tanh_and_mul(out, x)


  def fatrelu_and_mul(out: torch.Tensor,
                      x: torch.Tensor,
                      threshold: float = 0.0) -> None:
-     torch.ops._activation.fatrelu_and_mul(out, x, threshold)
+     ops.fatrelu_and_mul(out, x, threshold)


  def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_fast(out, x)
+     ops.gelu_fast(out, x)


  def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_new(out, x)
+     ops.gelu_new(out, x)


  def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_quick(out, x)
+     ops.gelu_quick(out, x)
build/torch25-cxx11-cu124-x86_64-linux/activation/_activation.abi3.so DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:3d0524c8b70ea4e427ca499312f076e834d06fe11bea9dd4decb2406a3094c8a
- size 807968
build/torch25-cxx11-cu124-x86_64-linux/activation/_activation_0_0_1.abi3.so ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:873f027d0761a39b5a34b27718e4a2b04db1fe724eebc1f6469c6a2cdc36ea16
+ size 808000
build/torch25-cxx11-cu124-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,3 @@
+ import torch
+ from . import _activation_0_0_1
+ ops = torch.ops._activation_0_0_1
build/torch25-cxx98-cu118-x86_64-linux/activation/__init__.py CHANGED
@@ -1,32 +1,41 @@
  import torch

- import activation._activation
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+

  def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.silu_and_mul(out, x)
+     ops.silu_and_mul(out, x)


  def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_and_mul(out, x)
+     ops.gelu_and_mul(out, x)


  def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_tanh_and_mul(out, x)
+     ops.gelu_tanh_and_mul(out, x)


  def fatrelu_and_mul(out: torch.Tensor,
                      x: torch.Tensor,
                      threshold: float = 0.0) -> None:
-     torch.ops._activation.fatrelu_and_mul(out, x, threshold)
+     ops.fatrelu_and_mul(out, x, threshold)


  def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_fast(out, x)
+     ops.gelu_fast(out, x)


  def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_new(out, x)
+     ops.gelu_new(out, x)


  def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_quick(out, x)
+     ops.gelu_quick(out, x)
build/torch25-cxx98-cu118-x86_64-linux/activation/_activation.abi3.so DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:f1a3146807c8c7c97a5222dae8fe0aa186ee01d5bc466bf3b74b49e23ecd156a
- size 761456
build/torch25-cxx98-cu118-x86_64-linux/activation/_activation_0_0_1.abi3.so ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bf2c0968222ddc059adf3c1726e9aeb9cba4f3c42cbf2b814d5ddbe36956c232
+ size 761480
build/torch25-cxx98-cu118-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,3 @@
+ import torch
+ from . import _activation_0_0_1
+ ops = torch.ops._activation_0_0_1
build/torch25-cxx98-cu121-x86_64-linux/activation/__init__.py CHANGED
@@ -1,32 +1,41 @@
  import torch

- import activation._activation
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+

  def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.silu_and_mul(out, x)
+     ops.silu_and_mul(out, x)


  def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_and_mul(out, x)
+     ops.gelu_and_mul(out, x)


  def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_tanh_and_mul(out, x)
+     ops.gelu_tanh_and_mul(out, x)


  def fatrelu_and_mul(out: torch.Tensor,
                      x: torch.Tensor,
                      threshold: float = 0.0) -> None:
-     torch.ops._activation.fatrelu_and_mul(out, x, threshold)
+     ops.fatrelu_and_mul(out, x, threshold)


  def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_fast(out, x)
+     ops.gelu_fast(out, x)


  def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_new(out, x)
+     ops.gelu_new(out, x)


  def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_quick(out, x)
+     ops.gelu_quick(out, x)
build/torch25-cxx98-cu121-x86_64-linux/activation/_activation.abi3.so DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:5536093e89b5707856d62f677da924c5e3c51e0a2868d3e7e11475508c97d5cf
- size 757400
build/torch25-cxx98-cu121-x86_64-linux/activation/_activation_0_0_1.abi3.so ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ff5b67d79c949546bfc2ced1abf3eaaeec56a707929185ca41baa6d68054d5b1
+ size 757424
build/torch25-cxx98-cu121-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,3 @@
+ import torch
+ from . import _activation_0_0_1
+ ops = torch.ops._activation_0_0_1
build/torch25-cxx98-cu124-x86_64-linux/activation/__init__.py CHANGED
@@ -1,32 +1,41 @@
  import torch

- import activation._activation
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+

  def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.silu_and_mul(out, x)
+     ops.silu_and_mul(out, x)


  def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_and_mul(out, x)
+     ops.gelu_and_mul(out, x)


  def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_tanh_and_mul(out, x)
+     ops.gelu_tanh_and_mul(out, x)


  def fatrelu_and_mul(out: torch.Tensor,
                      x: torch.Tensor,
                      threshold: float = 0.0) -> None:
-     torch.ops._activation.fatrelu_and_mul(out, x, threshold)
+     ops.fatrelu_and_mul(out, x, threshold)


  def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_fast(out, x)
+     ops.gelu_fast(out, x)


  def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_new(out, x)
+     ops.gelu_new(out, x)


  def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_quick(out, x)
+     ops.gelu_quick(out, x)
build/torch25-cxx98-cu124-x86_64-linux/activation/_activation.abi3.so DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:aa6928c9ae395303ab136e1d9761316a77da82f0a0b000996b52d9667f0b1f84
- size 798360
build/torch25-cxx98-cu124-x86_64-linux/activation/_activation_0_0_1.abi3.so ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1f371ce792c304716c75c523e1664c428a70e3af92bd3ca2053f5052139bf199
+ size 798384
build/torch25-cxx98-cu124-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,3 @@
+ import torch
+ from . import _activation_0_0_1
+ ops = torch.ops._activation_0_0_1
ext-torch/__init__.py CHANGED
@@ -1,32 +1,41 @@
  import torch

- import activation._activation
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+

  def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.silu_and_mul(out, x)
+     ops.silu_and_mul(out, x)


  def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_and_mul(out, x)
+     ops.gelu_and_mul(out, x)


  def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_tanh_and_mul(out, x)
+     ops.gelu_tanh_and_mul(out, x)


  def fatrelu_and_mul(out: torch.Tensor,
                      x: torch.Tensor,
                      threshold: float = 0.0) -> None:
-     torch.ops._activation.fatrelu_and_mul(out, x, threshold)
+     ops.fatrelu_and_mul(out, x, threshold)


  def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_fast(out, x)
+     ops.gelu_fast(out, x)


  def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_new(out, x)
+     ops.gelu_new(out, x)


  def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
-     torch.ops._activation.gelu_quick(out, x)
+     ops.gelu_quick(out, x)