danieldk HF Staff committed on
Commit fff932d · 1 Parent(s): c444f33
This view is limited to 50 files because the commit contains too many changes; see the raw diff for the full change set.
Files changed (50)
  1. build/torch25-cxx11-cu118-x86_64-linux/activation/{_activation_82352ca_dirty.abi3.so → _activation_c444f33.abi3.so} +2 -2
  2. build/torch25-cxx11-cu118-x86_64-linux/activation/_ops.py +3 -3
  3. build/torch25-cxx11-cu121-x86_64-linux/activation/{_activation_82352ca_dirty.abi3.so → _activation_c444f33.abi3.so} +2 -2
  4. build/torch25-cxx11-cu121-x86_64-linux/activation/_ops.py +3 -3
  5. build/torch25-cxx11-cu124-x86_64-linux/activation/{_activation_82352ca_dirty.abi3.so → _activation_c444f33.abi3.so} +2 -2
  6. build/torch25-cxx11-cu124-x86_64-linux/activation/_ops.py +3 -3
  7. build/torch25-cxx98-cu118-x86_64-linux/activation/{_activation_82352ca_dirty.abi3.so → _activation_c444f33.abi3.so} +2 -2
  8. build/torch25-cxx98-cu118-x86_64-linux/activation/_ops.py +3 -3
  9. build/torch25-cxx98-cu121-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so +0 -3
  10. build/torch25-cxx98-cu121-x86_64-linux/activation/_activation_c444f33.abi3.so +3 -0
  11. build/torch25-cxx98-cu121-x86_64-linux/activation/_ops.py +3 -3
  12. build/torch25-cxx98-cu124-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so +0 -3
  13. build/torch25-cxx98-cu124-x86_64-linux/activation/_activation_c444f33.abi3.so +3 -0
  14. build/torch25-cxx98-cu124-x86_64-linux/activation/_ops.py +3 -3
  15. build/torch26-cxx11-cu118-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so +0 -3
  16. build/torch26-cxx11-cu118-x86_64-linux/activation/_activation_c444f33.abi3.so +3 -0
  17. build/torch26-cxx11-cu118-x86_64-linux/activation/_ops.py +3 -3
  18. build/torch26-cxx11-cu124-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so +0 -3
  19. build/torch26-cxx11-cu124-x86_64-linux/activation/_activation_c444f33.abi3.so +3 -0
  20. build/torch26-cxx11-cu124-x86_64-linux/activation/_ops.py +3 -3
  21. build/torch26-cxx11-cu126-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so +0 -3
  22. build/torch26-cxx11-cu126-x86_64-linux/activation/_activation_c444f33.abi3.so +3 -0
  23. build/torch26-cxx11-cu126-x86_64-linux/activation/_ops.py +3 -3
  24. build/torch26-cxx11-rocm62-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so +0 -3
  25. build/torch26-cxx11-rocm62-x86_64-linux/activation/_activation_c444f33.abi3.so +3 -0
  26. build/torch26-cxx11-rocm62-x86_64-linux/activation/_ops.py +3 -3
  27. build/torch26-cxx98-cu118-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so +0 -3
  28. build/torch26-cxx98-cu118-x86_64-linux/activation/_activation_c444f33.abi3.so +3 -0
  29. build/torch26-cxx98-cu118-x86_64-linux/activation/_ops.py +3 -3
  30. build/torch26-cxx98-cu124-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so +0 -3
  31. build/torch26-cxx98-cu124-x86_64-linux/activation/_activation_c444f33.abi3.so +3 -0
  32. build/torch26-cxx98-cu124-x86_64-linux/activation/_ops.py +3 -3
  33. build/torch26-cxx98-cu126-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so +0 -3
  34. build/torch26-cxx98-cu126-x86_64-linux/activation/_activation_c444f33.abi3.so +3 -0
  35. build/torch26-cxx98-cu126-x86_64-linux/activation/_ops.py +3 -3
  36. build/torch27-cxx11-cu118-x86_64-linux/activation/__init__.py +52 -0
  37. build/torch27-cxx11-cu118-x86_64-linux/activation/_activation_c444f33.abi3.so +3 -0
  38. build/torch27-cxx11-cu118-x86_64-linux/activation/_ops.py +9 -0
  39. build/torch27-cxx11-cu118-x86_64-linux/activation/layers.py +65 -0
  40. build/torch27-cxx11-cu126-x86_64-linux/activation/__init__.py +52 -0
  41. build/torch27-cxx11-cu126-x86_64-linux/activation/_activation_c444f33.abi3.so +3 -0
  42. build/torch27-cxx11-cu126-x86_64-linux/activation/_ops.py +9 -0
  43. build/torch27-cxx11-cu126-x86_64-linux/activation/layers.py +65 -0
  44. build/torch27-cxx11-cu128-x86_64-linux/activation/__init__.py +52 -0
  45. build/torch27-cxx11-cu128-x86_64-linux/activation/_activation_c444f33.abi3.so +3 -0
  46. build/torch27-cxx11-cu128-x86_64-linux/activation/_ops.py +9 -0
  47. build/torch27-cxx11-cu128-x86_64-linux/activation/layers.py +65 -0
  48. build/torch27-cxx11-rocm63-x86_64-linux/activation/__init__.py +52 -0
  49. build/torch27-cxx11-rocm63-x86_64-linux/activation/_activation_c444f33.abi3.so +3 -0
  50. build/torch27-cxx11-rocm63-x86_64-linux/activation/_ops.py +9 -0
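Each build directory name above encodes the PyTorch version, C++ ABI, accelerator toolkit, and platform a shared object was compiled for. As a rough illustration of how such a variant string maps onto a running environment, here is a minimal sketch; the real selection logic lives in the Hugging Face `kernels` loader and may differ in detail, and the helper below is hypothetical:

```python
# Hypothetical helper: derive a build-variant directory name such as
# "torch26-cxx11-cu124-x86_64-linux" from the current environment.
# This mirrors the naming used above but is NOT the loader's real code.
import platform

import torch


def build_variant() -> str:
    major, minor = torch.__version__.split("+")[0].split(".")[:2]
    torch_tag = f"torch{major}{minor}"
    # PyTorch wheels are built against either the old or the new libstdc++ ABI.
    cxx_abi = "cxx11" if torch.compiled_with_cxx11_abi() else "cxx98"
    if torch.version.cuda is not None:
        accel = "cu" + torch.version.cuda.replace(".", "")
    else:
        hip_major, hip_minor = torch.version.hip.split(".")[:2]
        accel = f"rocm{hip_major}{hip_minor}"
    return f"{torch_tag}-{cxx_abi}-{accel}-{platform.machine()}-{platform.system().lower()}"
```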
build/torch25-cxx11-cu118-x86_64-linux/activation/{_activation_82352ca_dirty.abi3.so → _activation_c444f33.abi3.so} RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5280ad24a57b6590fde1d1c9304a626c920f709044f8f57b89c861ffe5709190
-size 2370160
+oid sha256:4a2958ebcd7f64aad946ac6145d6ac54e6d660578a952a7ad8835a074a88053a
+size 2370128
build/torch25-cxx11-cu118-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_82352ca_dirty
-ops = torch.ops._activation_82352ca_dirty
+from . import _activation_c444f33
+ops = torch.ops._activation_c444f33
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_82352ca_dirty::{op_name}"
+    return f"_activation_c444f33::{op_name}"
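For context, `_ops.py` is the shim through which the Python wrappers reach the compiled extension: importing `_activation_c444f33` registers its custom ops with `torch.ops`, and `add_op_namespace_prefix` qualifies an op name with that namespace. A minimal usage sketch, assuming a CUDA device and the `silu_and_mul` op this extension registers (tensor shapes are illustrative):

```python
# Sketch only: how downstream code reaches the kernels through this shim.
import torch

from activation._ops import ops, add_op_namespace_prefix

x = torch.randn(4, 16, device="cuda")                   # last dim is 2*d
out = torch.empty(4, 8, device="cuda", dtype=x.dtype)   # width-d output buffer
ops.silu_and_mul(out, x)                                # dispatches into the .so kernel

# Fully qualified op name, e.g. for torch.library lookups:
print(add_op_namespace_prefix("silu_and_mul"))          # _activation_c444f33::silu_and_mul
```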
build/torch25-cxx11-cu121-x86_64-linux/activation/{_activation_82352ca_dirty.abi3.so → _activation_c444f33.abi3.so} RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e1f82faff6c6ff8b24b2b1759d0cf27834f4ee12774c8db65118dc00d1a0b7ea
-size 2393264
+oid sha256:656aacfa25e45193b75506f0787757384bdc93be210c9631bdcea6a0886eb64e
+size 2393232
build/torch25-cxx11-cu121-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_82352ca_dirty
-ops = torch.ops._activation_82352ca_dirty
+from . import _activation_c444f33
+ops = torch.ops._activation_c444f33
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_82352ca_dirty::{op_name}"
+    return f"_activation_c444f33::{op_name}"
build/torch25-cxx11-cu124-x86_64-linux/activation/{_activation_82352ca_dirty.abi3.so → _activation_c444f33.abi3.so} RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:bbf780e4050fdc1afb692a7242b8908be49de3119f32f8973dd9aca446cb3345
-size 2427936
+oid sha256:9687c3d2339f75efbd24327b6f405e706cc6dba4607bdce66901c7578f610a17
+size 2427912
build/torch25-cxx11-cu124-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_82352ca_dirty
-ops = torch.ops._activation_82352ca_dirty
+from . import _activation_c444f33
+ops = torch.ops._activation_c444f33
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_82352ca_dirty::{op_name}"
+    return f"_activation_c444f33::{op_name}"
build/torch25-cxx98-cu118-x86_64-linux/activation/{_activation_82352ca_dirty.abi3.so → _activation_c444f33.abi3.so} RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5426caec78aa0c83a75ea73be6944be3054b8059714898ea7ed8deaaaff09750
-size 2362600
+oid sha256:4856dac4839c5ea477c15b9a71817b67703942edfd932ef0802b9cbb979e11b7
+size 2362568
build/torch25-cxx98-cu118-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_82352ca_dirty
-ops = torch.ops._activation_82352ca_dirty
+from . import _activation_c444f33
+ops = torch.ops._activation_c444f33
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_82352ca_dirty::{op_name}"
+    return f"_activation_c444f33::{op_name}"
build/torch25-cxx98-cu121-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:931fc38164f712f97fc299609f1f21a8efa009b355bbec691f6f6d95b2760fc7
-size 2385440
build/torch25-cxx98-cu121-x86_64-linux/activation/_activation_c444f33.abi3.so ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2061b1f6047a9fe847954fd28d0aae6fb820d2e71dc024a498b370bc2386dcc6
+size 2385408
build/torch25-cxx98-cu121-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_82352ca_dirty
-ops = torch.ops._activation_82352ca_dirty
+from . import _activation_c444f33
+ops = torch.ops._activation_c444f33
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_82352ca_dirty::{op_name}"
+    return f"_activation_c444f33::{op_name}"
build/torch25-cxx98-cu124-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:79a354ed436f6084e3d77e5fdf1408182f4a85c9db190068c59f37fd95237133
-size 2420192
build/torch25-cxx98-cu124-x86_64-linux/activation/_activation_c444f33.abi3.so ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7ccf34c60f89c6ff7d6ef018074545d5b393a53c7c18ee7f83cc92e0c11a6b78
+size 2420168
build/torch25-cxx98-cu124-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_82352ca_dirty
-ops = torch.ops._activation_82352ca_dirty
+from . import _activation_c444f33
+ops = torch.ops._activation_c444f33
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_82352ca_dirty::{op_name}"
+    return f"_activation_c444f33::{op_name}"
build/torch26-cxx11-cu118-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:5152eb865572482ef3e08cbe3daa5ca787273df76d32b6ab99fe8e29b660ae27
-size 2370264
build/torch26-cxx11-cu118-x86_64-linux/activation/_activation_c444f33.abi3.so ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2ac74565abc94658e2bf251e0d9b65ec8b5baaab62f1c1d5a4b216b929540bbd
+size 2370232
build/torch26-cxx11-cu118-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_82352ca_dirty
-ops = torch.ops._activation_82352ca_dirty
+from . import _activation_c444f33
+ops = torch.ops._activation_c444f33
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_82352ca_dirty::{op_name}"
+    return f"_activation_c444f33::{op_name}"
build/torch26-cxx11-cu124-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:1c5b950db6a6057ecf572068e85e48d9e587f131e42f0ff27f69d24866129035
-size 2428040
build/torch26-cxx11-cu124-x86_64-linux/activation/_activation_c444f33.abi3.so ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:25a10f905f10a5f99d85df4a6b71434a23f827d2a53bd5cdaa71eb40c631e1e9
+size 2428008
build/torch26-cxx11-cu124-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_82352ca_dirty
-ops = torch.ops._activation_82352ca_dirty
+from . import _activation_c444f33
+ops = torch.ops._activation_c444f33
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_82352ca_dirty::{op_name}"
+    return f"_activation_c444f33::{op_name}"
build/torch26-cxx11-cu126-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:21257c16027980c8f66248c0582b8a0cd864bf20e05c736465bebc5f1f7deb9c
-size 2436672
build/torch26-cxx11-cu126-x86_64-linux/activation/_activation_c444f33.abi3.so ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5a3797124cb8165368b885542518f3dedacd41aeb22681477f4e71ef05e1f006
+size 2436648
build/torch26-cxx11-cu126-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_82352ca_dirty
-ops = torch.ops._activation_82352ca_dirty
+from . import _activation_c444f33
+ops = torch.ops._activation_c444f33
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_82352ca_dirty::{op_name}"
+    return f"_activation_c444f33::{op_name}"
build/torch26-cxx11-rocm62-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:20dcdfa632a9c0c3f51e45829a169c04aa56d1d626a29185cf56596a4d44343a
-size 2465784
build/torch26-cxx11-rocm62-x86_64-linux/activation/_activation_c444f33.abi3.so ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4e6f7798231c197a1c15bd4212a2b678c2e9da729cfe70736670284fbe93481d
+size 2465760
build/torch26-cxx11-rocm62-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_82352ca_dirty
-ops = torch.ops._activation_82352ca_dirty
+from . import _activation_c444f33
+ops = torch.ops._activation_c444f33
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_82352ca_dirty::{op_name}"
+    return f"_activation_c444f33::{op_name}"
build/torch26-cxx98-cu118-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:1f5d622e4f0db5838b4f728795e43318a751886ca68086ec05901f7601cece34
-size 2362752
build/torch26-cxx98-cu118-x86_64-linux/activation/_activation_c444f33.abi3.so ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6022fc1e3e7c62a24dafa4ed7504e558b9841fc23c45b711338b746720d42b7c
+size 2362720
build/torch26-cxx98-cu118-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_82352ca_dirty
-ops = torch.ops._activation_82352ca_dirty
+from . import _activation_c444f33
+ops = torch.ops._activation_c444f33
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_82352ca_dirty::{op_name}"
+    return f"_activation_c444f33::{op_name}"
build/torch26-cxx98-cu124-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:94cc812ba7c524a5c926f8ea8ec8b7bf7243a9906145995dc27fe009978adbb8
-size 2420344
build/torch26-cxx98-cu124-x86_64-linux/activation/_activation_c444f33.abi3.so ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0ac010c25f6d4b8dc5a67cd69868cc0efb3f9b538a3675166f32a128e36bda2d
+size 2420312
build/torch26-cxx98-cu124-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_82352ca_dirty
-ops = torch.ops._activation_82352ca_dirty
+from . import _activation_c444f33
+ops = torch.ops._activation_c444f33
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_82352ca_dirty::{op_name}"
+    return f"_activation_c444f33::{op_name}"
build/torch26-cxx98-cu126-x86_64-linux/activation/_activation_82352ca_dirty.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:07f30fab40984f829e6ab081a6b771b8202b4d676c6e7156d00d26308fbe4695
-size 2424888
build/torch26-cxx98-cu126-x86_64-linux/activation/_activation_c444f33.abi3.so ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e9479b196d502f4794506798a67dc3472fbc970d29b308f95e5967348c7a5852
+size 2424856
build/torch26-cxx98-cu126-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_82352ca_dirty
-ops = torch.ops._activation_82352ca_dirty
+from . import _activation_c444f33
+ops = torch.ops._activation_c444f33
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_82352ca_dirty::{op_name}"
+    return f"_activation_c444f33::{op_name}"
build/torch27-cxx11-cu118-x86_64-linux/activation/__init__.py ADDED
@@ -0,0 +1,52 @@
+import torch
+
+from ._ops import ops
+
+from . import layers
+
+
+def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+    ops.silu_and_mul(out, x)
+    return out
+
+
+def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+    ops.gelu_and_mul(out, x)
+    return out
+
+
+def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+    ops.gelu_tanh_and_mul(out, x)
+    return out
+
+
+def fatrelu_and_mul(out: torch.Tensor, x: torch.Tensor, threshold: float = 0.0) -> torch.Tensor:
+    ops.fatrelu_and_mul(out, x, threshold)
+    return out
+
+
+def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+    ops.gelu_fast(out, x)
+    return out
+
+
+def gelu_new(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+    ops.gelu_new(out, x)
+    return out
+
+
+def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+    ops.gelu_quick(out, x)
+    return out
+
+
+__all__ = [
+    "silu_and_mul",
+    "gelu_and_mul",
+    "gelu_tanh_and_mul",
+    "fatrelu_and_mul",
+    "gelu_fast",
+    "gelu_new",
+    "gelu_quick",
+    "layers",
+]
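The new `__init__.py` exposes thin out-parameter wrappers: each function writes into a caller-provided `out` tensor and returns it, and for the `*_and_mul` ops the output's last dimension is half the input's. A hedged usage sketch, assuming the package is importable as `activation` (e.g. via the kernels loader) and shapes chosen for illustration:

```python
# Sketch: using the functional wrappers added above (CUDA device assumed).
import torch

import activation

x = torch.randn(32, 2048, device="cuda", dtype=torch.float16)
out = torch.empty(32, 1024, device="cuda", dtype=torch.float16)

# silu_and_mul computes SiLU(x[..., :d]) * x[..., d:] into `out`.
activation.silu_and_mul(out, x)
```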
build/torch27-cxx11-cu118-x86_64-linux/activation/_activation_c444f33.abi3.so ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0b873e65ffe2c83d24f0263eb1ffccc48cfefbabb35599229d0c7f3983d772f2
+size 2370328
build/torch27-cxx11-cu118-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,9 @@
+import torch
+from . import _activation_c444f33
+ops = torch.ops._activation_c444f33
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_activation_c444f33::{op_name}"
build/torch27-cxx11-cu118-x86_64-linux/activation/layers.py ADDED
@@ -0,0 +1,65 @@
+import torch
+import torch.nn as nn
+
+from ._ops import ops
+
+
+class SiluAndMul(nn.Module):
+    def forward(self, x: torch.Tensor):
+        d = x.shape[-1] // 2
+        output_shape = x.shape[:-1] + (d,)
+        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
+        ops.silu_and_mul(out, x)
+        return out
+
+
+class GeluAndMul(nn.Module):
+    def forward(self, x: torch.Tensor):
+        d = x.shape[-1] // 2
+        output_shape = x.shape[:-1] + (d,)
+        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
+        ops.gelu_and_mul(out, x)
+        return out
+
+
+class GeluTanhAndMul(nn.Module):
+    def forward(self, x: torch.Tensor):
+        d = x.shape[-1] // 2
+        output_shape = x.shape[:-1] + (d,)
+        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
+        ops.gelu_tanh_and_mul(out, x)
+        return out
+
+
+class FatreluAndMul(nn.Module):
+    def __init__(self, threshold: float = 0.0):
+        super().__init__()
+        self.threshold = threshold
+
+    def forward(self, x: torch.Tensor):
+        d = x.shape[-1] // 2
+        output_shape = x.shape[:-1] + (d,)
+        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
+        ops.fatrelu_and_mul(out, x, self.threshold)
+        return out
+
+
+class FastGELU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.gelu_fast(out, x)
+        return out
+
+
+class NewGELU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.gelu_new(out, x)
+        return out
+
+
+class QuickGELU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.gelu_quick(out, x)
+        return out
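`layers.py` wraps the same kernels as `nn.Module`s that allocate their own output tensor, so they can slot directly into a model definition; `SiluAndMul`, for instance, is the usual SwiGLU-style gated activation applied to a fused gate/up projection. A brief usage sketch; the `GatedMLP` class and all dimensions below are illustrative, not part of this repository:

```python
# Sketch: the module form allocates its own output, so it drops into an MLP.
import torch
import torch.nn as nn

from activation.layers import SiluAndMul


class GatedMLP(nn.Module):
    """Illustrative SwiGLU-style block; not part of this repository."""

    def __init__(self, hidden: int, intermediate: int):
        super().__init__()
        # Single projection producing gate and up halves, fused on the last dim.
        self.gate_up = nn.Linear(hidden, 2 * intermediate, bias=False)
        self.act = SiluAndMul()  # halves the last dimension
        self.down = nn.Linear(intermediate, hidden, bias=False)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return self.down(self.act(self.gate_up(x)))


block = GatedMLP(hidden=1024, intermediate=4096).to("cuda", torch.float16)
y = block(torch.randn(2, 8, 1024, device="cuda", dtype=torch.float16))
```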
build/torch27-cxx11-cu126-x86_64-linux/activation/__init__.py ADDED
@@ -0,0 +1,52 @@
+import torch
+
+from ._ops import ops
+
+from . import layers
+
+
+def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+    ops.silu_and_mul(out, x)
+    return out
+
+
+def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+    ops.gelu_and_mul(out, x)
+    return out
+
+
+def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+    ops.gelu_tanh_and_mul(out, x)
+    return out
+
+
+def fatrelu_and_mul(out: torch.Tensor, x: torch.Tensor, threshold: float = 0.0) -> torch.Tensor:
+    ops.fatrelu_and_mul(out, x, threshold)
+    return out
+
+
+def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+    ops.gelu_fast(out, x)
+    return out
+
+
+def gelu_new(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+    ops.gelu_new(out, x)
+    return out
+
+
+def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+    ops.gelu_quick(out, x)
+    return out
+
+
+__all__ = [
+    "silu_and_mul",
+    "gelu_and_mul",
+    "gelu_tanh_and_mul",
+    "fatrelu_and_mul",
+    "gelu_fast",
+    "gelu_new",
+    "gelu_quick",
+    "layers",
+]
build/torch27-cxx11-cu126-x86_64-linux/activation/_activation_c444f33.abi3.so ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:026e76545019c910c6ebb5d5f10af78f6053715c2020577d8baf1c99f752669a
+size 2436680
build/torch27-cxx11-cu126-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,9 @@
+import torch
+from . import _activation_c444f33
+ops = torch.ops._activation_c444f33
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_activation_c444f33::{op_name}"
build/torch27-cxx11-cu126-x86_64-linux/activation/layers.py ADDED
@@ -0,0 +1,65 @@
+import torch
+import torch.nn as nn
+
+from ._ops import ops
+
+
+class SiluAndMul(nn.Module):
+    def forward(self, x: torch.Tensor):
+        d = x.shape[-1] // 2
+        output_shape = x.shape[:-1] + (d,)
+        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
+        ops.silu_and_mul(out, x)
+        return out
+
+
+class GeluAndMul(nn.Module):
+    def forward(self, x: torch.Tensor):
+        d = x.shape[-1] // 2
+        output_shape = x.shape[:-1] + (d,)
+        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
+        ops.gelu_and_mul(out, x)
+        return out
+
+
+class GeluTanhAndMul(nn.Module):
+    def forward(self, x: torch.Tensor):
+        d = x.shape[-1] // 2
+        output_shape = x.shape[:-1] + (d,)
+        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
+        ops.gelu_tanh_and_mul(out, x)
+        return out
+
+
+class FatreluAndMul(nn.Module):
+    def __init__(self, threshold: float = 0.0):
+        super().__init__()
+        self.threshold = threshold
+
+    def forward(self, x: torch.Tensor):
+        d = x.shape[-1] // 2
+        output_shape = x.shape[:-1] + (d,)
+        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
+        ops.fatrelu_and_mul(out, x, self.threshold)
+        return out
+
+
+class FastGELU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.gelu_fast(out, x)
+        return out
+
+
+class NewGELU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.gelu_new(out, x)
+        return out
+
+
+class QuickGELU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.gelu_quick(out, x)
+        return out
build/torch27-cxx11-cu128-x86_64-linux/activation/__init__.py ADDED
@@ -0,0 +1,52 @@
+import torch
+
+from ._ops import ops
+
+from . import layers
+
+
+def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+    ops.silu_and_mul(out, x)
+    return out
+
+
+def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+    ops.gelu_and_mul(out, x)
+    return out
+
+
+def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+    ops.gelu_tanh_and_mul(out, x)
+    return out
+
+
+def fatrelu_and_mul(out: torch.Tensor, x: torch.Tensor, threshold: float = 0.0) -> torch.Tensor:
+    ops.fatrelu_and_mul(out, x, threshold)
+    return out
+
+
+def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+    ops.gelu_fast(out, x)
+    return out
+
+
+def gelu_new(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+    ops.gelu_new(out, x)
+    return out
+
+
+def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+    ops.gelu_quick(out, x)
+    return out
+
+
+__all__ = [
+    "silu_and_mul",
+    "gelu_and_mul",
+    "gelu_tanh_and_mul",
+    "fatrelu_and_mul",
+    "gelu_fast",
+    "gelu_new",
+    "gelu_quick",
+    "layers",
+]
build/torch27-cxx11-cu128-x86_64-linux/activation/_activation_c444f33.abi3.so ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f4bcf9768b827d6b848ba75a2c8fcc5a4abb6d5b6696185715bc1bb886e3cec1
+size 2401616
build/torch27-cxx11-cu128-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,9 @@
+import torch
+from . import _activation_c444f33
+ops = torch.ops._activation_c444f33
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_activation_c444f33::{op_name}"
build/torch27-cxx11-cu128-x86_64-linux/activation/layers.py ADDED
@@ -0,0 +1,65 @@
+import torch
+import torch.nn as nn
+
+from ._ops import ops
+
+
+class SiluAndMul(nn.Module):
+    def forward(self, x: torch.Tensor):
+        d = x.shape[-1] // 2
+        output_shape = x.shape[:-1] + (d,)
+        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
+        ops.silu_and_mul(out, x)
+        return out
+
+
+class GeluAndMul(nn.Module):
+    def forward(self, x: torch.Tensor):
+        d = x.shape[-1] // 2
+        output_shape = x.shape[:-1] + (d,)
+        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
+        ops.gelu_and_mul(out, x)
+        return out
+
+
+class GeluTanhAndMul(nn.Module):
+    def forward(self, x: torch.Tensor):
+        d = x.shape[-1] // 2
+        output_shape = x.shape[:-1] + (d,)
+        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
+        ops.gelu_tanh_and_mul(out, x)
+        return out
+
+
+class FatreluAndMul(nn.Module):
+    def __init__(self, threshold: float = 0.0):
+        super().__init__()
+        self.threshold = threshold
+
+    def forward(self, x: torch.Tensor):
+        d = x.shape[-1] // 2
+        output_shape = x.shape[:-1] + (d,)
+        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
+        ops.fatrelu_and_mul(out, x, self.threshold)
+        return out
+
+
+class FastGELU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.gelu_fast(out, x)
+        return out
+
+
+class NewGELU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.gelu_new(out, x)
+        return out
+
+
+class QuickGELU(nn.Module):
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = torch.empty_like(x)
+        ops.gelu_quick(out, x)
+        return out
build/torch27-cxx11-rocm63-x86_64-linux/activation/__init__.py ADDED
@@ -0,0 +1,52 @@
+import torch
+
+from ._ops import ops
+
+from . import layers
+
+
+def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+    ops.silu_and_mul(out, x)
+    return out
+
+
+def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+    ops.gelu_and_mul(out, x)
+    return out
+
+
+def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+    ops.gelu_tanh_and_mul(out, x)
+    return out
+
+
+def fatrelu_and_mul(out: torch.Tensor, x: torch.Tensor, threshold: float = 0.0) -> torch.Tensor:
+    ops.fatrelu_and_mul(out, x, threshold)
+    return out
+
+
+def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+    ops.gelu_fast(out, x)
+    return out
+
+
+def gelu_new(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+    ops.gelu_new(out, x)
+    return out
+
+
+def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+    ops.gelu_quick(out, x)
+    return out
+
+
+__all__ = [
+    "silu_and_mul",
+    "gelu_and_mul",
+    "gelu_tanh_and_mul",
+    "fatrelu_and_mul",
+    "gelu_fast",
+    "gelu_new",
+    "gelu_quick",
+    "layers",
+]
build/torch27-cxx11-rocm63-x86_64-linux/activation/_activation_c444f33.abi3.so ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d8981cf87279d933db6754cec03a80828fa8e8a28ae787c8ad87c6a8c5424291
+size 2467896
build/torch27-cxx11-rocm63-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,9 @@
+import torch
+from . import _activation_c444f33
+ops = torch.ops._activation_c444f33
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_activation_c444f33::{op_name}"