diff --git a/build/torch25-cxx11-cu118-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so b/build/torch25-cxx11-cu118-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so
deleted file mode 100755
index 20f8021af2e3d5fb18dc66720a45a5ef1e8807dd..0000000000000000000000000000000000000000
--- a/build/torch25-cxx11-cu118-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:5280ad24a57b6590fde1d1c9304a626c920f709044f8f57b89c861ffe5709190
-size 2370160
diff --git a/build/torch25-cxx11-cu118-x86_64-linux/activation/_activation_c444f33.abi3.so b/build/torch25-cxx11-cu118-x86_64-linux/activation/_activation_c444f33.abi3.so
new file mode 100755
index 0000000000000000000000000000000000000000..edaffc1a81ca4cf5eca5463d7e314e66a6db5330
--- /dev/null
+++ b/build/torch25-cxx11-cu118-x86_64-linux/activation/_activation_c444f33.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4a2958ebcd7f64aad946ac6145d6ac54e6d660578a952a7ad8835a074a88053a
+size 2370128
diff --git a/build/torch25-cxx11-cu118-x86_64-linux/activation/_ops.py b/build/torch25-cxx11-cu118-x86_64-linux/activation/_ops.py
index 5e09a02430f82264f549ba881c5d8ad77b0a3eb9..d05d57ec3942d443a97df8990939b323dd96236b 100644
--- a/build/torch25-cxx11-cu118-x86_64-linux/activation/_ops.py
+++ b/build/torch25-cxx11-cu118-x86_64-linux/activation/_ops.py
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_82352ca_dirty
-ops = torch.ops._activation_82352ca_dirty
+from . import _activation_c444f33
+ops = torch.ops._activation_c444f33
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_82352ca_dirty::{op_name}"
\ No newline at end of file
+    return f"_activation_c444f33::{op_name}"
\ No newline at end of file
diff --git a/build/torch25-cxx11-cu121-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so b/build/torch25-cxx11-cu121-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so
deleted file mode 100755
index 38cbe109645a2a719e056ac69f0c8b464e10097d..0000000000000000000000000000000000000000
--- a/build/torch25-cxx11-cu121-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:e1f82faff6c6ff8b24b2b1759d0cf27834f4ee12774c8db65118dc00d1a0b7ea
-size 2393264
diff --git a/build/torch25-cxx11-cu121-x86_64-linux/activation/_activation_c444f33.abi3.so b/build/torch25-cxx11-cu121-x86_64-linux/activation/_activation_c444f33.abi3.so
new file mode 100755
index 0000000000000000000000000000000000000000..85feb644fce3e257f5c10cd8cc3d132c8c395909
--- /dev/null
+++ b/build/torch25-cxx11-cu121-x86_64-linux/activation/_activation_c444f33.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:656aacfa25e45193b75506f0787757384bdc93be210c9631bdcea6a0886eb64e
+size 2393232
diff --git a/build/torch25-cxx11-cu121-x86_64-linux/activation/_ops.py b/build/torch25-cxx11-cu121-x86_64-linux/activation/_ops.py
index 5e09a02430f82264f549ba881c5d8ad77b0a3eb9..d05d57ec3942d443a97df8990939b323dd96236b 100644
--- a/build/torch25-cxx11-cu121-x86_64-linux/activation/_ops.py
+++ b/build/torch25-cxx11-cu121-x86_64-linux/activation/_ops.py
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_82352ca_dirty
-ops = torch.ops._activation_82352ca_dirty
+from . import _activation_c444f33
+ops = torch.ops._activation_c444f33
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
""" - return f"_activation_82352ca_dirty::{op_name}" \ No newline at end of file + return f"_activation_c444f33::{op_name}" \ No newline at end of file diff --git a/build/torch25-cxx11-cu124-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so b/build/torch25-cxx11-cu124-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so deleted file mode 100755 index 3555f95ec07137ebf523dc9f767afa40a742991c..0000000000000000000000000000000000000000 --- a/build/torch25-cxx11-cu124-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:bbf780e4050fdc1afb692a7242b8908be49de3119f32f8973dd9aca446cb3345 -size 2427936 diff --git a/build/torch25-cxx11-cu124-x86_64-linux/activation/_activation_c444f33.abi3.so b/build/torch25-cxx11-cu124-x86_64-linux/activation/_activation_c444f33.abi3.so new file mode 100755 index 0000000000000000000000000000000000000000..92ffc49ea00cad986986daa71ee528e42d2f054a --- /dev/null +++ b/build/torch25-cxx11-cu124-x86_64-linux/activation/_activation_c444f33.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9687c3d2339f75efbd24327b6f405e706cc6dba4607bdce66901c7578f610a17 +size 2427912 diff --git a/build/torch25-cxx11-cu124-x86_64-linux/activation/_ops.py b/build/torch25-cxx11-cu124-x86_64-linux/activation/_ops.py index 5e09a02430f82264f549ba881c5d8ad77b0a3eb9..d05d57ec3942d443a97df8990939b323dd96236b 100644 --- a/build/torch25-cxx11-cu124-x86_64-linux/activation/_ops.py +++ b/build/torch25-cxx11-cu124-x86_64-linux/activation/_ops.py @@ -1,9 +1,9 @@ import torch -from . import _activation_82352ca_dirty -ops = torch.ops._activation_82352ca_dirty +from . import _activation_c444f33 +ops = torch.ops._activation_c444f33 def add_op_namespace_prefix(op_name: str): """ Prefix op by namespace. """ - return f"_activation_82352ca_dirty::{op_name}" \ No newline at end of file + return f"_activation_c444f33::{op_name}" \ No newline at end of file diff --git a/build/torch25-cxx98-cu118-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so b/build/torch25-cxx98-cu118-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so deleted file mode 100755 index 7cfa137011f274130a05f5a48704a5c008182b4a..0000000000000000000000000000000000000000 --- a/build/torch25-cxx98-cu118-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5426caec78aa0c83a75ea73be6944be3054b8059714898ea7ed8deaaaff09750 -size 2362600 diff --git a/build/torch25-cxx98-cu118-x86_64-linux/activation/_activation_c444f33.abi3.so b/build/torch25-cxx98-cu118-x86_64-linux/activation/_activation_c444f33.abi3.so new file mode 100755 index 0000000000000000000000000000000000000000..231d963b5da3e86215785f103116c8247480ac22 --- /dev/null +++ b/build/torch25-cxx98-cu118-x86_64-linux/activation/_activation_c444f33.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4856dac4839c5ea477c15b9a71817b67703942edfd932ef0802b9cbb979e11b7 +size 2362568 diff --git a/build/torch25-cxx98-cu118-x86_64-linux/activation/_ops.py b/build/torch25-cxx98-cu118-x86_64-linux/activation/_ops.py index 5e09a02430f82264f549ba881c5d8ad77b0a3eb9..d05d57ec3942d443a97df8990939b323dd96236b 100644 --- a/build/torch25-cxx98-cu118-x86_64-linux/activation/_ops.py +++ b/build/torch25-cxx98-cu118-x86_64-linux/activation/_ops.py @@ -1,9 +1,9 @@ import torch -from . import _activation_82352ca_dirty -ops = torch.ops._activation_82352ca_dirty +from . 
+from . import _activation_c444f33
+ops = torch.ops._activation_c444f33
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_82352ca_dirty::{op_name}"
\ No newline at end of file
+    return f"_activation_c444f33::{op_name}"
\ No newline at end of file
diff --git a/build/torch25-cxx98-cu121-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so b/build/torch25-cxx98-cu121-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so
deleted file mode 100755
index 041bc94ba31406201db39e25669173859cdc9297..0000000000000000000000000000000000000000
--- a/build/torch25-cxx98-cu121-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:931fc38164f712f97fc299609f1f21a8efa009b355bbec691f6f6d95b2760fc7
-size 2385440
diff --git a/build/torch25-cxx98-cu121-x86_64-linux/activation/_activation_c444f33.abi3.so b/build/torch25-cxx98-cu121-x86_64-linux/activation/_activation_c444f33.abi3.so
new file mode 100755
index 0000000000000000000000000000000000000000..34b2ed296634589a7ce00e41d61ad0cb6fe6b0d4
--- /dev/null
+++ b/build/torch25-cxx98-cu121-x86_64-linux/activation/_activation_c444f33.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2061b1f6047a9fe847954fd28d0aae6fb820d2e71dc024a498b370bc2386dcc6
+size 2385408
diff --git a/build/torch25-cxx98-cu121-x86_64-linux/activation/_ops.py b/build/torch25-cxx98-cu121-x86_64-linux/activation/_ops.py
index 5e09a02430f82264f549ba881c5d8ad77b0a3eb9..d05d57ec3942d443a97df8990939b323dd96236b 100644
--- a/build/torch25-cxx98-cu121-x86_64-linux/activation/_ops.py
+++ b/build/torch25-cxx98-cu121-x86_64-linux/activation/_ops.py
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_82352ca_dirty
-ops = torch.ops._activation_82352ca_dirty
+from . import _activation_c444f33
+ops = torch.ops._activation_c444f33
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
""" - return f"_activation_82352ca_dirty::{op_name}" \ No newline at end of file + return f"_activation_c444f33::{op_name}" \ No newline at end of file diff --git a/build/torch25-cxx98-cu124-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so b/build/torch25-cxx98-cu124-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so deleted file mode 100755 index eead920af2f75f76863e94ec259e37fabaaf07ed..0000000000000000000000000000000000000000 --- a/build/torch25-cxx98-cu124-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:79a354ed436f6084e3d77e5fdf1408182f4a85c9db190068c59f37fd95237133 -size 2420192 diff --git a/build/torch25-cxx98-cu124-x86_64-linux/activation/_activation_c444f33.abi3.so b/build/torch25-cxx98-cu124-x86_64-linux/activation/_activation_c444f33.abi3.so new file mode 100755 index 0000000000000000000000000000000000000000..b139c23a657d6450326049af6d5a0a1b61102ebb --- /dev/null +++ b/build/torch25-cxx98-cu124-x86_64-linux/activation/_activation_c444f33.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7ccf34c60f89c6ff7d6ef018074545d5b393a53c7c18ee7f83cc92e0c11a6b78 +size 2420168 diff --git a/build/torch25-cxx98-cu124-x86_64-linux/activation/_ops.py b/build/torch25-cxx98-cu124-x86_64-linux/activation/_ops.py index 5e09a02430f82264f549ba881c5d8ad77b0a3eb9..d05d57ec3942d443a97df8990939b323dd96236b 100644 --- a/build/torch25-cxx98-cu124-x86_64-linux/activation/_ops.py +++ b/build/torch25-cxx98-cu124-x86_64-linux/activation/_ops.py @@ -1,9 +1,9 @@ import torch -from . import _activation_82352ca_dirty -ops = torch.ops._activation_82352ca_dirty +from . import _activation_c444f33 +ops = torch.ops._activation_c444f33 def add_op_namespace_prefix(op_name: str): """ Prefix op by namespace. """ - return f"_activation_82352ca_dirty::{op_name}" \ No newline at end of file + return f"_activation_c444f33::{op_name}" \ No newline at end of file diff --git a/build/torch26-cxx11-cu118-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so b/build/torch26-cxx11-cu118-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so deleted file mode 100755 index 53616f610f8fbb35a7dda0a56e61872349d2b55f..0000000000000000000000000000000000000000 --- a/build/torch26-cxx11-cu118-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5152eb865572482ef3e08cbe3daa5ca787273df76d32b6ab99fe8e29b660ae27 -size 2370264 diff --git a/build/torch26-cxx11-cu118-x86_64-linux/activation/_activation_c444f33.abi3.so b/build/torch26-cxx11-cu118-x86_64-linux/activation/_activation_c444f33.abi3.so new file mode 100755 index 0000000000000000000000000000000000000000..ac18e497cee2b586fa952d2613b82b1f5a580c0c --- /dev/null +++ b/build/torch26-cxx11-cu118-x86_64-linux/activation/_activation_c444f33.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2ac74565abc94658e2bf251e0d9b65ec8b5baaab62f1c1d5a4b216b929540bbd +size 2370232 diff --git a/build/torch26-cxx11-cu118-x86_64-linux/activation/_ops.py b/build/torch26-cxx11-cu118-x86_64-linux/activation/_ops.py index 5e09a02430f82264f549ba881c5d8ad77b0a3eb9..d05d57ec3942d443a97df8990939b323dd96236b 100644 --- a/build/torch26-cxx11-cu118-x86_64-linux/activation/_ops.py +++ b/build/torch26-cxx11-cu118-x86_64-linux/activation/_ops.py @@ -1,9 +1,9 @@ import torch -from . import _activation_82352ca_dirty -ops = torch.ops._activation_82352ca_dirty +from . 
+from . import _activation_c444f33
+ops = torch.ops._activation_c444f33
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_82352ca_dirty::{op_name}"
\ No newline at end of file
+    return f"_activation_c444f33::{op_name}"
\ No newline at end of file
diff --git a/build/torch26-cxx11-cu124-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so b/build/torch26-cxx11-cu124-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so
deleted file mode 100755
index 3c09bcc8e4158980c70088539dabe78993fc4189..0000000000000000000000000000000000000000
--- a/build/torch26-cxx11-cu124-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:1c5b950db6a6057ecf572068e85e48d9e587f131e42f0ff27f69d24866129035
-size 2428040
diff --git a/build/torch26-cxx11-cu124-x86_64-linux/activation/_activation_c444f33.abi3.so b/build/torch26-cxx11-cu124-x86_64-linux/activation/_activation_c444f33.abi3.so
new file mode 100755
index 0000000000000000000000000000000000000000..2dd7ef2ce20447386c206446fb28b61dc83d7ad8
--- /dev/null
+++ b/build/torch26-cxx11-cu124-x86_64-linux/activation/_activation_c444f33.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:25a10f905f10a5f99d85df4a6b71434a23f827d2a53bd5cdaa71eb40c631e1e9
+size 2428008
diff --git a/build/torch26-cxx11-cu124-x86_64-linux/activation/_ops.py b/build/torch26-cxx11-cu124-x86_64-linux/activation/_ops.py
index 5e09a02430f82264f549ba881c5d8ad77b0a3eb9..d05d57ec3942d443a97df8990939b323dd96236b 100644
--- a/build/torch26-cxx11-cu124-x86_64-linux/activation/_ops.py
+++ b/build/torch26-cxx11-cu124-x86_64-linux/activation/_ops.py
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_82352ca_dirty
-ops = torch.ops._activation_82352ca_dirty
+from . import _activation_c444f33
+ops = torch.ops._activation_c444f33
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
""" - return f"_activation_82352ca_dirty::{op_name}" \ No newline at end of file + return f"_activation_c444f33::{op_name}" \ No newline at end of file diff --git a/build/torch26-cxx11-cu126-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so b/build/torch26-cxx11-cu126-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so deleted file mode 100755 index d7bc8f1bdd74b38f7bf32b1a8d5d5e5e1b081a21..0000000000000000000000000000000000000000 --- a/build/torch26-cxx11-cu126-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:21257c16027980c8f66248c0582b8a0cd864bf20e05c736465bebc5f1f7deb9c -size 2436672 diff --git a/build/torch26-cxx11-cu126-x86_64-linux/activation/_activation_c444f33.abi3.so b/build/torch26-cxx11-cu126-x86_64-linux/activation/_activation_c444f33.abi3.so new file mode 100755 index 0000000000000000000000000000000000000000..0d16a2c307f1c67ec111c44f3e541e5388a9d961 --- /dev/null +++ b/build/torch26-cxx11-cu126-x86_64-linux/activation/_activation_c444f33.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5a3797124cb8165368b885542518f3dedacd41aeb22681477f4e71ef05e1f006 +size 2436648 diff --git a/build/torch26-cxx11-cu126-x86_64-linux/activation/_ops.py b/build/torch26-cxx11-cu126-x86_64-linux/activation/_ops.py index 5e09a02430f82264f549ba881c5d8ad77b0a3eb9..d05d57ec3942d443a97df8990939b323dd96236b 100644 --- a/build/torch26-cxx11-cu126-x86_64-linux/activation/_ops.py +++ b/build/torch26-cxx11-cu126-x86_64-linux/activation/_ops.py @@ -1,9 +1,9 @@ import torch -from . import _activation_82352ca_dirty -ops = torch.ops._activation_82352ca_dirty +from . import _activation_c444f33 +ops = torch.ops._activation_c444f33 def add_op_namespace_prefix(op_name: str): """ Prefix op by namespace. """ - return f"_activation_82352ca_dirty::{op_name}" \ No newline at end of file + return f"_activation_c444f33::{op_name}" \ No newline at end of file diff --git a/build/torch26-cxx11-rocm62-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so b/build/torch26-cxx11-rocm62-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so deleted file mode 100755 index d3670700709cd66e4efc6884a4026a9eec124375..0000000000000000000000000000000000000000 --- a/build/torch26-cxx11-rocm62-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:20dcdfa632a9c0c3f51e45829a169c04aa56d1d626a29185cf56596a4d44343a -size 2465784 diff --git a/build/torch26-cxx11-rocm62-x86_64-linux/activation/_activation_c444f33.abi3.so b/build/torch26-cxx11-rocm62-x86_64-linux/activation/_activation_c444f33.abi3.so new file mode 100755 index 0000000000000000000000000000000000000000..b9acd3db16dc4ebea5ce09a574fa067984999e08 --- /dev/null +++ b/build/torch26-cxx11-rocm62-x86_64-linux/activation/_activation_c444f33.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4e6f7798231c197a1c15bd4212a2b678c2e9da729cfe70736670284fbe93481d +size 2465760 diff --git a/build/torch26-cxx11-rocm62-x86_64-linux/activation/_ops.py b/build/torch26-cxx11-rocm62-x86_64-linux/activation/_ops.py index 5e09a02430f82264f549ba881c5d8ad77b0a3eb9..d05d57ec3942d443a97df8990939b323dd96236b 100644 --- a/build/torch26-cxx11-rocm62-x86_64-linux/activation/_ops.py +++ b/build/torch26-cxx11-rocm62-x86_64-linux/activation/_ops.py @@ -1,9 +1,9 @@ import torch -from . 
-from . import _activation_82352ca_dirty
-ops = torch.ops._activation_82352ca_dirty
+from . import _activation_c444f33
+ops = torch.ops._activation_c444f33
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_82352ca_dirty::{op_name}"
\ No newline at end of file
+    return f"_activation_c444f33::{op_name}"
\ No newline at end of file
diff --git a/build/torch26-cxx98-cu118-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so b/build/torch26-cxx98-cu118-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so
deleted file mode 100755
index 5fbf67a1a3de376a66db756a13b6c18b0b26fa0d..0000000000000000000000000000000000000000
--- a/build/torch26-cxx98-cu118-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:1f5d622e4f0db5838b4f728795e43318a751886ca68086ec05901f7601cece34
-size 2362752
diff --git a/build/torch26-cxx98-cu118-x86_64-linux/activation/_activation_c444f33.abi3.so b/build/torch26-cxx98-cu118-x86_64-linux/activation/_activation_c444f33.abi3.so
new file mode 100755
index 0000000000000000000000000000000000000000..e5bbb638d04140b7a4acad78854a0ecc12b3377a
--- /dev/null
+++ b/build/torch26-cxx98-cu118-x86_64-linux/activation/_activation_c444f33.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6022fc1e3e7c62a24dafa4ed7504e558b9841fc23c45b711338b746720d42b7c
+size 2362720
diff --git a/build/torch26-cxx98-cu118-x86_64-linux/activation/_ops.py b/build/torch26-cxx98-cu118-x86_64-linux/activation/_ops.py
index 5e09a02430f82264f549ba881c5d8ad77b0a3eb9..d05d57ec3942d443a97df8990939b323dd96236b 100644
--- a/build/torch26-cxx98-cu118-x86_64-linux/activation/_ops.py
+++ b/build/torch26-cxx98-cu118-x86_64-linux/activation/_ops.py
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_82352ca_dirty
-ops = torch.ops._activation_82352ca_dirty
+from . import _activation_c444f33
+ops = torch.ops._activation_c444f33
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
""" - return f"_activation_82352ca_dirty::{op_name}" \ No newline at end of file + return f"_activation_c444f33::{op_name}" \ No newline at end of file diff --git a/build/torch26-cxx98-cu124-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so b/build/torch26-cxx98-cu124-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so deleted file mode 100755 index 1c51299118f3b9c58fa8205c8889d54ee399e3f1..0000000000000000000000000000000000000000 --- a/build/torch26-cxx98-cu124-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:94cc812ba7c524a5c926f8ea8ec8b7bf7243a9906145995dc27fe009978adbb8 -size 2420344 diff --git a/build/torch26-cxx98-cu124-x86_64-linux/activation/_activation_c444f33.abi3.so b/build/torch26-cxx98-cu124-x86_64-linux/activation/_activation_c444f33.abi3.so new file mode 100755 index 0000000000000000000000000000000000000000..e2ddeb26d25de96a0da5c609ae39bf6fcb4ec2d4 --- /dev/null +++ b/build/torch26-cxx98-cu124-x86_64-linux/activation/_activation_c444f33.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0ac010c25f6d4b8dc5a67cd69868cc0efb3f9b538a3675166f32a128e36bda2d +size 2420312 diff --git a/build/torch26-cxx98-cu124-x86_64-linux/activation/_ops.py b/build/torch26-cxx98-cu124-x86_64-linux/activation/_ops.py index 5e09a02430f82264f549ba881c5d8ad77b0a3eb9..d05d57ec3942d443a97df8990939b323dd96236b 100644 --- a/build/torch26-cxx98-cu124-x86_64-linux/activation/_ops.py +++ b/build/torch26-cxx98-cu124-x86_64-linux/activation/_ops.py @@ -1,9 +1,9 @@ import torch -from . import _activation_82352ca_dirty -ops = torch.ops._activation_82352ca_dirty +from . import _activation_c444f33 +ops = torch.ops._activation_c444f33 def add_op_namespace_prefix(op_name: str): """ Prefix op by namespace. """ - return f"_activation_82352ca_dirty::{op_name}" \ No newline at end of file + return f"_activation_c444f33::{op_name}" \ No newline at end of file diff --git a/build/torch26-cxx98-cu126-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so b/build/torch26-cxx98-cu126-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so deleted file mode 100755 index d693d98fb6a7593c5a42307f3aa89aa50297a49c..0000000000000000000000000000000000000000 --- a/build/torch26-cxx98-cu126-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:07f30fab40984f829e6ab081a6b771b8202b4d676c6e7156d00d26308fbe4695 -size 2424888 diff --git a/build/torch26-cxx98-cu126-x86_64-linux/activation/_activation_c444f33.abi3.so b/build/torch26-cxx98-cu126-x86_64-linux/activation/_activation_c444f33.abi3.so new file mode 100755 index 0000000000000000000000000000000000000000..34eeeb59c7603453cb5718d70c89f3262ca4bf2a --- /dev/null +++ b/build/torch26-cxx98-cu126-x86_64-linux/activation/_activation_c444f33.abi3.so @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e9479b196d502f4794506798a67dc3472fbc970d29b308f95e5967348c7a5852 +size 2424856 diff --git a/build/torch26-cxx98-cu126-x86_64-linux/activation/_ops.py b/build/torch26-cxx98-cu126-x86_64-linux/activation/_ops.py index 5e09a02430f82264f549ba881c5d8ad77b0a3eb9..d05d57ec3942d443a97df8990939b323dd96236b 100644 --- a/build/torch26-cxx98-cu126-x86_64-linux/activation/_ops.py +++ b/build/torch26-cxx98-cu126-x86_64-linux/activation/_ops.py @@ -1,9 +1,9 @@ import torch -from . import _activation_82352ca_dirty -ops = torch.ops._activation_82352ca_dirty +from . 
+from . import _activation_c444f33
+ops = torch.ops._activation_c444f33
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_82352ca_dirty::{op_name}"
\ No newline at end of file
+    return f"_activation_c444f33::{op_name}"
\ No newline at end of file
diff --git a/build/torch27-cxx11-cu118-x86_64-linux/activation/__init__.py b/build/torch27-cxx11-cu118-x86_64-linux/activation/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..ddb37490dad9d8ffcbeb13ed06b33f03fef8ed78
--- /dev/null
+++ b/build/torch27-cxx11-cu118-x86_64-linux/activation/__init__.py
@@ -0,0 +1,52 @@
+import torch
+
+from ._ops import ops
+
+from . import layers
+
+
+def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.silu_and_mul(out, x)
+    return out
+
+
+def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_and_mul(out, x)
+    return out
+
+
+def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_tanh_and_mul(out, x)
+    return out
+
+
+def fatrelu_and_mul(out: torch.Tensor, x: torch.Tensor, threshold: float = 0.0) -> None:
+    ops.fatrelu_and_mul(out, x, threshold)
+    return out
+
+
+def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_fast(out, x)
+    return out
+
+
+def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_new(out, x)
+    return out
+
+
+def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_quick(out, x)
+    return out
+
+
+__all__ = [
+    "silu_and_mul",
+    "gelu_and_mul",
+    "gelu_tanh_and_mul",
+    "fatrelu_and_mul",
+    "gelu_fast",
+    "gelu_new",
+    "gelu_quick",
+    "layers",
+]
diff --git a/build/torch27-cxx11-cu118-x86_64-linux/activation/_activation_c444f33.abi3.so b/build/torch27-cxx11-cu118-x86_64-linux/activation/_activation_c444f33.abi3.so
new file mode 100755
index 0000000000000000000000000000000000000000..3d372d7f9bdb65dac82e116b0be278271267b148
--- /dev/null
+++ b/build/torch27-cxx11-cu118-x86_64-linux/activation/_activation_c444f33.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0b873e65ffe2c83d24f0263eb1ffccc48cfefbabb35599229d0c7f3983d772f2
+size 2370328
diff --git a/build/torch27-cxx11-cu118-x86_64-linux/activation/_ops.py b/build/torch27-cxx11-cu118-x86_64-linux/activation/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..d05d57ec3942d443a97df8990939b323dd96236b
--- /dev/null
+++ b/build/torch27-cxx11-cu118-x86_64-linux/activation/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _activation_c444f33
+ops = torch.ops._activation_c444f33
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+ """ + return f"_activation_c444f33::{op_name}" \ No newline at end of file diff --git a/build/torch27-cxx11-cu118-x86_64-linux/activation/layers.py b/build/torch27-cxx11-cu118-x86_64-linux/activation/layers.py new file mode 100644 index 0000000000000000000000000000000000000000..99c129e3b1c9ed4c18166d5b5d67eb08f137a27f --- /dev/null +++ b/build/torch27-cxx11-cu118-x86_64-linux/activation/layers.py @@ -0,0 +1,65 @@ +import torch +import torch.nn as nn + +from ._ops import ops + + +class SiluAndMul(nn.Module): + def forward(self, x: torch.Tensor): + d = x.shape[-1] // 2 + output_shape = x.shape[:-1] + (d,) + out = torch.empty(output_shape, dtype=x.dtype, device=x.device) + ops.silu_and_mul(out, x) + return out + + +class GeluAndMul(nn.Module): + def forward(self, x: torch.Tensor): + d = x.shape[-1] // 2 + output_shape = x.shape[:-1] + (d,) + out = torch.empty(output_shape, dtype=x.dtype, device=x.device) + ops.gelu_and_mul(out, x) + return out + + +class GeluTanhAndMul(nn.Module): + def forward(self, x: torch.Tensor): + d = x.shape[-1] // 2 + output_shape = x.shape[:-1] + (d,) + out = torch.empty(output_shape, dtype=x.dtype, device=x.device) + ops.gelu_tanh_and_mul(out, x) + return out + + +class FatreluAndMul(nn.Module): + def __init__(self, threshold: float = 0.0): + super().__init__() + self.threshold = threshold + + def forward(self, x: torch.Tensor): + d = x.shape[-1] // 2 + output_shape = x.shape[:-1] + (d,) + out = torch.empty(output_shape, dtype=x.dtype, device=x.device) + ops.fatrelu_and_mul(out, x, self.threshold) + return out + + +class FastGELU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.gelu_fast(out, x) + return out + + +class NewGELU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.gelu_new(out, x) + return out + + +class QuickGELU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.gelu_quick(out, x) + return out diff --git a/build/torch27-cxx11-cu126-x86_64-linux/activation/__init__.py b/build/torch27-cxx11-cu126-x86_64-linux/activation/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..ddb37490dad9d8ffcbeb13ed06b33f03fef8ed78 --- /dev/null +++ b/build/torch27-cxx11-cu126-x86_64-linux/activation/__init__.py @@ -0,0 +1,52 @@ +import torch + +from ._ops import ops + +from . 
+from . import layers
+
+
+def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.silu_and_mul(out, x)
+    return out
+
+
+def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_and_mul(out, x)
+    return out
+
+
+def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_tanh_and_mul(out, x)
+    return out
+
+
+def fatrelu_and_mul(out: torch.Tensor, x: torch.Tensor, threshold: float = 0.0) -> None:
+    ops.fatrelu_and_mul(out, x, threshold)
+    return out
+
+
+def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_fast(out, x)
+    return out
+
+
+def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_new(out, x)
+    return out
+
+
+def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_quick(out, x)
+    return out
+
+
+__all__ = [
+    "silu_and_mul",
+    "gelu_and_mul",
+    "gelu_tanh_and_mul",
+    "fatrelu_and_mul",
+    "gelu_fast",
+    "gelu_new",
+    "gelu_quick",
+    "layers",
+]
diff --git a/build/torch27-cxx11-cu126-x86_64-linux/activation/_activation_c444f33.abi3.so b/build/torch27-cxx11-cu126-x86_64-linux/activation/_activation_c444f33.abi3.so
new file mode 100755
index 0000000000000000000000000000000000000000..c78bfb1a51ad4825cf4e553cfb0bfebce15cbf0c
--- /dev/null
+++ b/build/torch27-cxx11-cu126-x86_64-linux/activation/_activation_c444f33.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:026e76545019c910c6ebb5d5f10af78f6053715c2020577d8baf1c99f752669a
+size 2436680
diff --git a/build/torch27-cxx11-cu126-x86_64-linux/activation/_ops.py b/build/torch27-cxx11-cu126-x86_64-linux/activation/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..d05d57ec3942d443a97df8990939b323dd96236b
--- /dev/null
+++ b/build/torch27-cxx11-cu126-x86_64-linux/activation/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _activation_c444f33
+ops = torch.ops._activation_c444f33
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+ """ + return f"_activation_c444f33::{op_name}" \ No newline at end of file diff --git a/build/torch27-cxx11-cu126-x86_64-linux/activation/layers.py b/build/torch27-cxx11-cu126-x86_64-linux/activation/layers.py new file mode 100644 index 0000000000000000000000000000000000000000..99c129e3b1c9ed4c18166d5b5d67eb08f137a27f --- /dev/null +++ b/build/torch27-cxx11-cu126-x86_64-linux/activation/layers.py @@ -0,0 +1,65 @@ +import torch +import torch.nn as nn + +from ._ops import ops + + +class SiluAndMul(nn.Module): + def forward(self, x: torch.Tensor): + d = x.shape[-1] // 2 + output_shape = x.shape[:-1] + (d,) + out = torch.empty(output_shape, dtype=x.dtype, device=x.device) + ops.silu_and_mul(out, x) + return out + + +class GeluAndMul(nn.Module): + def forward(self, x: torch.Tensor): + d = x.shape[-1] // 2 + output_shape = x.shape[:-1] + (d,) + out = torch.empty(output_shape, dtype=x.dtype, device=x.device) + ops.gelu_and_mul(out, x) + return out + + +class GeluTanhAndMul(nn.Module): + def forward(self, x: torch.Tensor): + d = x.shape[-1] // 2 + output_shape = x.shape[:-1] + (d,) + out = torch.empty(output_shape, dtype=x.dtype, device=x.device) + ops.gelu_tanh_and_mul(out, x) + return out + + +class FatreluAndMul(nn.Module): + def __init__(self, threshold: float = 0.0): + super().__init__() + self.threshold = threshold + + def forward(self, x: torch.Tensor): + d = x.shape[-1] // 2 + output_shape = x.shape[:-1] + (d,) + out = torch.empty(output_shape, dtype=x.dtype, device=x.device) + ops.fatrelu_and_mul(out, x, self.threshold) + return out + + +class FastGELU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.gelu_fast(out, x) + return out + + +class NewGELU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.gelu_new(out, x) + return out + + +class QuickGELU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.gelu_quick(out, x) + return out diff --git a/build/torch27-cxx11-cu128-x86_64-linux/activation/__init__.py b/build/torch27-cxx11-cu128-x86_64-linux/activation/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..ddb37490dad9d8ffcbeb13ed06b33f03fef8ed78 --- /dev/null +++ b/build/torch27-cxx11-cu128-x86_64-linux/activation/__init__.py @@ -0,0 +1,52 @@ +import torch + +from ._ops import ops + +from . 
+from . import layers
+
+
+def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.silu_and_mul(out, x)
+    return out
+
+
+def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_and_mul(out, x)
+    return out
+
+
+def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_tanh_and_mul(out, x)
+    return out
+
+
+def fatrelu_and_mul(out: torch.Tensor, x: torch.Tensor, threshold: float = 0.0) -> None:
+    ops.fatrelu_and_mul(out, x, threshold)
+    return out
+
+
+def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_fast(out, x)
+    return out
+
+
+def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_new(out, x)
+    return out
+
+
+def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_quick(out, x)
+    return out
+
+
+__all__ = [
+    "silu_and_mul",
+    "gelu_and_mul",
+    "gelu_tanh_and_mul",
+    "fatrelu_and_mul",
+    "gelu_fast",
+    "gelu_new",
+    "gelu_quick",
+    "layers",
+]
diff --git a/build/torch27-cxx11-cu128-x86_64-linux/activation/_activation_c444f33.abi3.so b/build/torch27-cxx11-cu128-x86_64-linux/activation/_activation_c444f33.abi3.so
new file mode 100755
index 0000000000000000000000000000000000000000..dd0965a7642c565aeca993f6905f3b46e895ad5e
--- /dev/null
+++ b/build/torch27-cxx11-cu128-x86_64-linux/activation/_activation_c444f33.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f4bcf9768b827d6b848ba75a2c8fcc5a4abb6d5b6696185715bc1bb886e3cec1
+size 2401616
diff --git a/build/torch27-cxx11-cu128-x86_64-linux/activation/_ops.py b/build/torch27-cxx11-cu128-x86_64-linux/activation/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..d05d57ec3942d443a97df8990939b323dd96236b
--- /dev/null
+++ b/build/torch27-cxx11-cu128-x86_64-linux/activation/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _activation_c444f33
+ops = torch.ops._activation_c444f33
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+ """ + return f"_activation_c444f33::{op_name}" \ No newline at end of file diff --git a/build/torch27-cxx11-cu128-x86_64-linux/activation/layers.py b/build/torch27-cxx11-cu128-x86_64-linux/activation/layers.py new file mode 100644 index 0000000000000000000000000000000000000000..99c129e3b1c9ed4c18166d5b5d67eb08f137a27f --- /dev/null +++ b/build/torch27-cxx11-cu128-x86_64-linux/activation/layers.py @@ -0,0 +1,65 @@ +import torch +import torch.nn as nn + +from ._ops import ops + + +class SiluAndMul(nn.Module): + def forward(self, x: torch.Tensor): + d = x.shape[-1] // 2 + output_shape = x.shape[:-1] + (d,) + out = torch.empty(output_shape, dtype=x.dtype, device=x.device) + ops.silu_and_mul(out, x) + return out + + +class GeluAndMul(nn.Module): + def forward(self, x: torch.Tensor): + d = x.shape[-1] // 2 + output_shape = x.shape[:-1] + (d,) + out = torch.empty(output_shape, dtype=x.dtype, device=x.device) + ops.gelu_and_mul(out, x) + return out + + +class GeluTanhAndMul(nn.Module): + def forward(self, x: torch.Tensor): + d = x.shape[-1] // 2 + output_shape = x.shape[:-1] + (d,) + out = torch.empty(output_shape, dtype=x.dtype, device=x.device) + ops.gelu_tanh_and_mul(out, x) + return out + + +class FatreluAndMul(nn.Module): + def __init__(self, threshold: float = 0.0): + super().__init__() + self.threshold = threshold + + def forward(self, x: torch.Tensor): + d = x.shape[-1] // 2 + output_shape = x.shape[:-1] + (d,) + out = torch.empty(output_shape, dtype=x.dtype, device=x.device) + ops.fatrelu_and_mul(out, x, self.threshold) + return out + + +class FastGELU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.gelu_fast(out, x) + return out + + +class NewGELU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.gelu_new(out, x) + return out + + +class QuickGELU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.gelu_quick(out, x) + return out diff --git a/build/torch27-cxx11-rocm63-x86_64-linux/activation/__init__.py b/build/torch27-cxx11-rocm63-x86_64-linux/activation/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..ddb37490dad9d8ffcbeb13ed06b33f03fef8ed78 --- /dev/null +++ b/build/torch27-cxx11-rocm63-x86_64-linux/activation/__init__.py @@ -0,0 +1,52 @@ +import torch + +from ._ops import ops + +from . 
+from . import layers
+
+
+def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.silu_and_mul(out, x)
+    return out
+
+
+def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_and_mul(out, x)
+    return out
+
+
+def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_tanh_and_mul(out, x)
+    return out
+
+
+def fatrelu_and_mul(out: torch.Tensor, x: torch.Tensor, threshold: float = 0.0) -> None:
+    ops.fatrelu_and_mul(out, x, threshold)
+    return out
+
+
+def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_fast(out, x)
+    return out
+
+
+def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_new(out, x)
+    return out
+
+
+def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_quick(out, x)
+    return out
+
+
+__all__ = [
+    "silu_and_mul",
+    "gelu_and_mul",
+    "gelu_tanh_and_mul",
+    "fatrelu_and_mul",
+    "gelu_fast",
+    "gelu_new",
+    "gelu_quick",
+    "layers",
+]
diff --git a/build/torch27-cxx11-rocm63-x86_64-linux/activation/_activation_c444f33.abi3.so b/build/torch27-cxx11-rocm63-x86_64-linux/activation/_activation_c444f33.abi3.so
new file mode 100755
index 0000000000000000000000000000000000000000..4857c2835edf6110cb3332610448d6051c1dd916
--- /dev/null
+++ b/build/torch27-cxx11-rocm63-x86_64-linux/activation/_activation_c444f33.abi3.so
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d8981cf87279d933db6754cec03a80828fa8e8a28ae787c8ad87c6a8c5424291
+size 2467896
diff --git a/build/torch27-cxx11-rocm63-x86_64-linux/activation/_ops.py b/build/torch27-cxx11-rocm63-x86_64-linux/activation/_ops.py
new file mode 100644
index 0000000000000000000000000000000000000000..d05d57ec3942d443a97df8990939b323dd96236b
--- /dev/null
+++ b/build/torch27-cxx11-rocm63-x86_64-linux/activation/_ops.py
@@ -0,0 +1,9 @@
+import torch
+from . import _activation_c444f33
+ops = torch.ops._activation_c444f33
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+ """ + return f"_activation_c444f33::{op_name}" \ No newline at end of file diff --git a/build/torch27-cxx11-rocm63-x86_64-linux/activation/layers.py b/build/torch27-cxx11-rocm63-x86_64-linux/activation/layers.py new file mode 100644 index 0000000000000000000000000000000000000000..99c129e3b1c9ed4c18166d5b5d67eb08f137a27f --- /dev/null +++ b/build/torch27-cxx11-rocm63-x86_64-linux/activation/layers.py @@ -0,0 +1,65 @@ +import torch +import torch.nn as nn + +from ._ops import ops + + +class SiluAndMul(nn.Module): + def forward(self, x: torch.Tensor): + d = x.shape[-1] // 2 + output_shape = x.shape[:-1] + (d,) + out = torch.empty(output_shape, dtype=x.dtype, device=x.device) + ops.silu_and_mul(out, x) + return out + + +class GeluAndMul(nn.Module): + def forward(self, x: torch.Tensor): + d = x.shape[-1] // 2 + output_shape = x.shape[:-1] + (d,) + out = torch.empty(output_shape, dtype=x.dtype, device=x.device) + ops.gelu_and_mul(out, x) + return out + + +class GeluTanhAndMul(nn.Module): + def forward(self, x: torch.Tensor): + d = x.shape[-1] // 2 + output_shape = x.shape[:-1] + (d,) + out = torch.empty(output_shape, dtype=x.dtype, device=x.device) + ops.gelu_tanh_and_mul(out, x) + return out + + +class FatreluAndMul(nn.Module): + def __init__(self, threshold: float = 0.0): + super().__init__() + self.threshold = threshold + + def forward(self, x: torch.Tensor): + d = x.shape[-1] // 2 + output_shape = x.shape[:-1] + (d,) + out = torch.empty(output_shape, dtype=x.dtype, device=x.device) + ops.fatrelu_and_mul(out, x, self.threshold) + return out + + +class FastGELU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.gelu_fast(out, x) + return out + + +class NewGELU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.gelu_new(out, x) + return out + + +class QuickGELU(nn.Module): + def forward(self, x: torch.Tensor) -> torch.Tensor: + out = torch.empty_like(x) + ops.gelu_quick(out, x) + return out