Build (with ROCm)
- build/torch25-cxx11-cu118-x86_64-linux/activation/{_activation_o63kkyjirmkf4.abi3.so → _activation_82352ca_dirty.abi3.so} +1 -1
- build/torch25-cxx11-cu118-x86_64-linux/activation/_ops.py +3 -3
- build/torch25-cxx11-cu121-x86_64-linux/activation/{_activation_vrl36m2ejer54.abi3.so → _activation_82352ca_dirty.abi3.so} +1 -1
- build/torch25-cxx11-cu121-x86_64-linux/activation/_ops.py +3 -3
- build/torch25-cxx11-cu124-x86_64-linux/activation/{_activation_va3moa75vw7c2.abi3.so → _activation_82352ca_dirty.abi3.so} +1 -1
- build/torch25-cxx11-cu124-x86_64-linux/activation/_ops.py +3 -3
- build/torch25-cxx98-cu118-x86_64-linux/activation/{_activation_qr3gs3eckeig4.abi3.so → _activation_82352ca_dirty.abi3.so} +1 -1
- build/torch25-cxx98-cu118-x86_64-linux/activation/_ops.py +3 -3
- build/torch25-cxx98-cu121-x86_64-linux/activation/{_activation_p7gbzt25w3zg2.abi3.so → _activation_82352ca_dirty.abi3.so} +1 -1
- build/torch25-cxx98-cu121-x86_64-linux/activation/_ops.py +3 -3
- build/torch25-cxx98-cu124-x86_64-linux/activation/{_activation_jg7yaigtn7wco.abi3.so → _activation_82352ca_dirty.abi3.so} +1 -1
- build/torch25-cxx98-cu124-x86_64-linux/activation/_ops.py +3 -3
- build/torch26-cxx11-cu118-x86_64-linux/activation/{_activation_ncisyrun7guwk.abi3.so → _activation_82352ca_dirty.abi3.so} +1 -1
- build/torch26-cxx11-cu118-x86_64-linux/activation/_ops.py +3 -3
- build/torch26-cxx11-cu124-x86_64-linux/activation/{_activation_ochhfvlnc3vyc.abi3.so → _activation_82352ca_dirty.abi3.so} +1 -1
- build/torch26-cxx11-cu124-x86_64-linux/activation/_ops.py +3 -3
- build/torch26-cxx11-cu126-x86_64-linux/activation/{_activation_u6vnqubnicksq.abi3.so → _activation_82352ca_dirty.abi3.so} +1 -1
- build/torch26-cxx11-cu126-x86_64-linux/activation/_ops.py +3 -3
- build/torch26-cxx11-rocm62-x86_64-linux/activation/__init__.py +52 -0
- build/torch26-cxx11-rocm62-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so +3 -0
- build/torch26-cxx11-rocm62-x86_64-linux/activation/_ops.py +9 -0
- build/torch26-cxx11-rocm62-x86_64-linux/activation/layers.py +65 -0
- build/torch26-cxx98-cu118-x86_64-linux/activation/{_activation_2vn6ty3gfqfb6.abi3.so → _activation_82352ca_dirty.abi3.so} +1 -1
- build/torch26-cxx98-cu118-x86_64-linux/activation/_ops.py +3 -3
- build/torch26-cxx98-cu124-x86_64-linux/activation/{_activation_myvteedxdpqc6.abi3.so → _activation_82352ca_dirty.abi3.so} +1 -1
- build/torch26-cxx98-cu124-x86_64-linux/activation/_ops.py +3 -3
- build/torch26-cxx98-cu126-x86_64-linux/activation/{_activation_rbswus6emrhm2.abi3.so → _activation_82352ca_dirty.abi3.so} +1 -1
- build/torch26-cxx98-cu126-x86_64-linux/activation/_ops.py +3 -3
build/torch25-cxx11-cu118-x86_64-linux/activation/{_activation_o63kkyjirmkf4.abi3.so → _activation_82352ca_dirty.abi3.so}
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:5280ad24a57b6590fde1d1c9304a626c920f709044f8f57b89c861ffe5709190
 size 2370160
build/torch25-cxx11-cu118-x86_64-linux/activation/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import
-ops = torch.ops.
+from . import _activation_82352ca_dirty
+ops = torch.ops._activation_82352ca_dirty
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"
+    return f"_activation_82352ca_dirty::{op_name}"
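For context, the only change in each `_ops.py` is the rebinding of the op namespace to the freshly built shared library: every kernel then resolves through `torch.ops._activation_82352ca_dirty`, and `add_op_namespace_prefix` produces the fully qualified op name. A minimal usage sketch (illustrative only, not part of the commit; it assumes one of the built `activation` packages above is on the import path):

```python
import torch

from activation import _ops  # any of the built variants above (import path assumed)

# Qualified name as produced by the helper in this commit:
print(_ops.add_op_namespace_prefix("silu_and_mul"))
# -> "_activation_82352ca_dirty::silu_and_mul"

# Calling the native kernel directly through the bound namespace.
# Shapes follow the package's own wrappers: `out` holds half of x's last dim.
x = torch.randn(4, 2 * 128, device="cuda", dtype=torch.float16)
out = torch.empty(4, 128, device="cuda", dtype=torch.float16)
_ops.ops.silu_and_mul(out, x)
```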
build/torch25-cxx11-cu121-x86_64-linux/activation/{_activation_vrl36m2ejer54.abi3.so → _activation_82352ca_dirty.abi3.so}
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:e1f82faff6c6ff8b24b2b1759d0cf27834f4ee12774c8db65118dc00d1a0b7ea
 size 2393264
build/torch25-cxx11-cu121-x86_64-linux/activation/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import
-ops = torch.ops.
+from . import _activation_82352ca_dirty
+ops = torch.ops._activation_82352ca_dirty
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"
+    return f"_activation_82352ca_dirty::{op_name}"
build/torch25-cxx11-cu124-x86_64-linux/activation/{_activation_va3moa75vw7c2.abi3.so → _activation_82352ca_dirty.abi3.so}
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:bbf780e4050fdc1afb692a7242b8908be49de3119f32f8973dd9aca446cb3345
 size 2427936
build/torch25-cxx11-cu124-x86_64-linux/activation/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import
-ops = torch.ops.
+from . import _activation_82352ca_dirty
+ops = torch.ops._activation_82352ca_dirty
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"
+    return f"_activation_82352ca_dirty::{op_name}"
build/torch25-cxx98-cu118-x86_64-linux/activation/{_activation_qr3gs3eckeig4.abi3.so → _activation_82352ca_dirty.abi3.so}
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:5426caec78aa0c83a75ea73be6944be3054b8059714898ea7ed8deaaaff09750
 size 2362600
build/torch25-cxx98-cu118-x86_64-linux/activation/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import
-ops = torch.ops.
+from . import _activation_82352ca_dirty
+ops = torch.ops._activation_82352ca_dirty
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"
+    return f"_activation_82352ca_dirty::{op_name}"
build/torch25-cxx98-cu121-x86_64-linux/activation/{_activation_p7gbzt25w3zg2.abi3.so → _activation_82352ca_dirty.abi3.so}
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:931fc38164f712f97fc299609f1f21a8efa009b355bbec691f6f6d95b2760fc7
 size 2385440
build/torch25-cxx98-cu121-x86_64-linux/activation/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import
-ops = torch.ops.
+from . import _activation_82352ca_dirty
+ops = torch.ops._activation_82352ca_dirty
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"
+    return f"_activation_82352ca_dirty::{op_name}"
build/torch25-cxx98-cu124-x86_64-linux/activation/{_activation_jg7yaigtn7wco.abi3.so → _activation_82352ca_dirty.abi3.so}
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:79a354ed436f6084e3d77e5fdf1408182f4a85c9db190068c59f37fd95237133
 size 2420192
build/torch25-cxx98-cu124-x86_64-linux/activation/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import
-ops = torch.ops.
+from . import _activation_82352ca_dirty
+ops = torch.ops._activation_82352ca_dirty
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"
+    return f"_activation_82352ca_dirty::{op_name}"
build/torch26-cxx11-cu118-x86_64-linux/activation/{_activation_ncisyrun7guwk.abi3.so → _activation_82352ca_dirty.abi3.so}
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:5152eb865572482ef3e08cbe3daa5ca787273df76d32b6ab99fe8e29b660ae27
 size 2370264
build/torch26-cxx11-cu118-x86_64-linux/activation/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import
-ops = torch.ops.
+from . import _activation_82352ca_dirty
+ops = torch.ops._activation_82352ca_dirty
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"
+    return f"_activation_82352ca_dirty::{op_name}"
build/torch26-cxx11-cu124-x86_64-linux/activation/{_activation_ochhfvlnc3vyc.abi3.so → _activation_82352ca_dirty.abi3.so}
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:1c5b950db6a6057ecf572068e85e48d9e587f131e42f0ff27f69d24866129035
 size 2428040
build/torch26-cxx11-cu124-x86_64-linux/activation/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import
-ops = torch.ops.
+from . import _activation_82352ca_dirty
+ops = torch.ops._activation_82352ca_dirty
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"
+    return f"_activation_82352ca_dirty::{op_name}"
build/torch26-cxx11-cu126-x86_64-linux/activation/{_activation_u6vnqubnicksq.abi3.so → _activation_82352ca_dirty.abi3.so}
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:21257c16027980c8f66248c0582b8a0cd864bf20e05c736465bebc5f1f7deb9c
 size 2436672
build/torch26-cxx11-cu126-x86_64-linux/activation/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import
-ops = torch.ops.
+from . import _activation_82352ca_dirty
+ops = torch.ops._activation_82352ca_dirty
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"
+    return f"_activation_82352ca_dirty::{op_name}"
build/torch26-cxx11-rocm62-x86_64-linux/activation/__init__.py
ADDED
@@ -0,0 +1,52 @@
+import torch
+
+from ._ops import ops
+
+from . import layers
+
+
+def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.silu_and_mul(out, x)
+    return out
+
+
+def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_and_mul(out, x)
+    return out
+
+
+def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_tanh_and_mul(out, x)
+    return out
+
+
+def fatrelu_and_mul(out: torch.Tensor, x: torch.Tensor, threshold: float = 0.0) -> None:
+    ops.fatrelu_and_mul(out, x, threshold)
+    return out
+
+
+def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_fast(out, x)
+    return out
+
+
+def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_new(out, x)
+    return out
+
+
+def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_quick(out, x)
+    return out
+
+
+__all__ = [
+    "silu_and_mul",
+    "gelu_and_mul",
+    "gelu_tanh_and_mul",
+    "fatrelu_and_mul",
+    "gelu_fast",
+    "gelu_new",
+    "gelu_quick",
+    "layers",
+]
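The new ROCm `__init__.py` mirrors the existing CUDA variants: each function is a thin wrapper that launches the kernel into a caller-allocated `out` tensor (and returns it for convenience). A hedged usage sketch, assuming the `torch26-cxx11-rocm62` build is importable as `activation` (PyTorch exposes ROCm/HIP devices under the `"cuda"` device type):

```python
import torch
import activation  # the ROCm build added in this commit (import path assumed)

x = torch.randn(8, 2 * 256, device="cuda", dtype=torch.float16)  # "cuda" maps to HIP on ROCm
d = x.shape[-1] // 2
out = torch.empty(x.shape[:-1] + (d,), dtype=x.dtype, device=x.device)

# SiLU(x[..., :d]) * x[..., d:], written into `out` and also returned.
activation.silu_and_mul(out, x)
y = activation.gelu_tanh_and_mul(torch.empty_like(out), x)
```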
build/torch26-cxx11-rocm62-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:20dcdfa632a9c0c3f51e45829a169c04aa56d1d626a29185cf56596a4d44343a
+size 2465784
build/torch26-cxx11-rocm62-x86_64-linux/activation/_ops.py
ADDED
@@ -0,0 +1,9 @@
+import torch
+from . import _activation_82352ca_dirty
+ops = torch.ops._activation_82352ca_dirty
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_activation_82352ca_dirty::{op_name}"
build/torch26-cxx11-rocm62-x86_64-linux/activation/layers.py
ADDED
@@ -0,0 +1,65 @@
+import torch
+import torch.nn as nn
+
+from ._ops import ops
+
+
+class SiluAndMul(nn.Module):
+    def forward(self, x: torch.Tensor):
+        d = x.shape[-1] // 2
+        output_shape = x.shape[:-1] + (d,)
+        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
+        ops.silu_and_mul(out, x)
+        return out
+
+
+class GeluAndMul(nn.Module):
+    def forward(self, x: torch.Tensor):
+        d = x.shape[-1] // 2
+        output_shape = x.shape[:-1] + (d,)
+        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
+        ops.gelu_and_mul(out, x)
+        return out
+
+
+class GeluTanhAndMul(nn.Module):
+    def forward(self, x: torch.Tensor):
+        d = x.shape[-1] // 2
+        output_shape = x.shape[:-1] + (d,)
+        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
+        ops.gelu_tanh_and_mul(out, x)
+        return out
+
+
+class FatreluAndMul(nn.Module):
+    def __init__(self, threshold: float = 0.0):
+        super().__init__()
+        self.threshold = threshold
+
+    def forward(self, x: torch.Tensor):
+        d = x.shape[-1] // 2
+        output_shape = x.shape[:-1] + (d,)
+        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
+        ops.fatrelu_and_mul(out, x, self.threshold)
+        return out
+
+
+class FastGELU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.gelu_fast(out, x)
+        return out
+
+
+class NewGELU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.gelu_new(out, x)
+        return out
+
+
+class QuickGELU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.gelu_quick(out, x)
+        return out
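`layers.py` exposes the same kernels as stateless `nn.Module`s that allocate their own output, so they can be dropped into a model definition in place of eager activations. A short sketch under the same import assumption as above:

```python
import torch
from activation.layers import SiluAndMul, FatreluAndMul  # import path assumed

silu_mul = SiluAndMul()
fatrelu_mul = FatreluAndMul(threshold=0.05)

# Gate-and-up projection output: the modules split the last dimension in half.
gate_up = torch.randn(2, 16, 2 * 1024, device="cuda", dtype=torch.float16)
h1 = silu_mul(gate_up)     # shape (2, 16, 1024)
h2 = fatrelu_mul(gate_up)  # same shape, thresholded-ReLU gate instead of SiLU
```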
build/torch26-cxx98-cu118-x86_64-linux/activation/{_activation_2vn6ty3gfqfb6.abi3.so → _activation_82352ca_dirty.abi3.so}
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:1f5d622e4f0db5838b4f728795e43318a751886ca68086ec05901f7601cece34
 size 2362752
build/torch26-cxx98-cu118-x86_64-linux/activation/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import
-ops = torch.ops.
+from . import _activation_82352ca_dirty
+ops = torch.ops._activation_82352ca_dirty
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"
+    return f"_activation_82352ca_dirty::{op_name}"
build/torch26-cxx98-cu124-x86_64-linux/activation/{_activation_myvteedxdpqc6.abi3.so → _activation_82352ca_dirty.abi3.so}
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:94cc812ba7c524a5c926f8ea8ec8b7bf7243a9906145995dc27fe009978adbb8
 size 2420344
build/torch26-cxx98-cu124-x86_64-linux/activation/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import
-ops = torch.ops.
+from . import _activation_82352ca_dirty
+ops = torch.ops._activation_82352ca_dirty
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"
+    return f"_activation_82352ca_dirty::{op_name}"
build/torch26-cxx98-cu126-x86_64-linux/activation/{_activation_rbswus6emrhm2.abi3.so → _activation_82352ca_dirty.abi3.so}
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:07f30fab40984f829e6ab081a6b771b8202b4d676c6e7156d00d26308fbe4695
 size 2424888
build/torch26-cxx98-cu126-x86_64-linux/activation/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import
-ops = torch.ops.
+from . import _activation_82352ca_dirty
+ops = torch.ops._activation_82352ca_dirty
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"
+    return f"_activation_82352ca_dirty::{op_name}"