Build
- build/torch25-cxx11-cu118-x86_64-linux/flash_mla/{_flash_mla_ogm5yqv7ipuoy.abi3.so → _flash_mla_d4f4195.abi3.so} +2 -2
- build/torch25-cxx11-cu118-x86_64-linux/flash_mla/_ops.py +3 -3
- build/torch25-cxx11-cu121-x86_64-linux/flash_mla/{_flash_mla_tq7soo7mu65hc.abi3.so → _flash_mla_d4f4195.abi3.so} +2 -2
- build/torch25-cxx11-cu121-x86_64-linux/flash_mla/_ops.py +3 -3
- build/torch25-cxx11-cu124-x86_64-linux/flash_mla/{_flash_mla_jmusvvowiwydi.abi3.so → _flash_mla_d4f4195.abi3.so} +2 -2
- build/torch25-cxx11-cu124-x86_64-linux/flash_mla/_ops.py +3 -3
- build/torch25-cxx98-cu118-x86_64-linux/flash_mla/{_flash_mla_cqxjqwwlf7suu.abi3.so → _flash_mla_d4f4195.abi3.so} +2 -2
- build/torch25-cxx98-cu118-x86_64-linux/flash_mla/_ops.py +3 -3
- build/torch25-cxx98-cu121-x86_64-linux/flash_mla/_flash_mla_d4f4195.abi3.so +3 -0
- build/torch25-cxx98-cu121-x86_64-linux/flash_mla/_flash_mla_uqlcnehkqrgzo.abi3.so +0 -3
- build/torch25-cxx98-cu121-x86_64-linux/flash_mla/_ops.py +3 -3
- build/torch25-cxx98-cu124-x86_64-linux/flash_mla/_flash_mla_d4f4195.abi3.so +3 -0
- build/torch25-cxx98-cu124-x86_64-linux/flash_mla/_flash_mla_iawwb234swad4.abi3.so +0 -3
- build/torch25-cxx98-cu124-x86_64-linux/flash_mla/_ops.py +3 -3
- build/torch26-cxx11-cu118-x86_64-linux/flash_mla/_flash_mla_cb2u2ikyeixke.abi3.so +0 -3
- build/torch26-cxx11-cu118-x86_64-linux/flash_mla/_flash_mla_d4f4195.abi3.so +3 -0
- build/torch26-cxx11-cu118-x86_64-linux/flash_mla/_ops.py +3 -3
- build/torch26-cxx11-cu124-x86_64-linux/flash_mla/_flash_mla_d4f4195.abi3.so +3 -0
- build/torch26-cxx11-cu124-x86_64-linux/flash_mla/_flash_mla_zdwdze3ckbyyo.abi3.so +0 -3
- build/torch26-cxx11-cu124-x86_64-linux/flash_mla/_ops.py +3 -3
- build/torch26-cxx11-cu126-x86_64-linux/flash_mla/_flash_mla_d4f4195.abi3.so +3 -0
- build/torch26-cxx11-cu126-x86_64-linux/flash_mla/_flash_mla_rv7dqsm5x7jvw.abi3.so +0 -3
- build/torch26-cxx11-cu126-x86_64-linux/flash_mla/_ops.py +3 -3
- build/torch26-cxx98-cu118-x86_64-linux/flash_mla/_flash_mla_d4f4195.abi3.so +3 -0
- build/torch26-cxx98-cu118-x86_64-linux/flash_mla/_flash_mla_qgsddurmm4eh6.abi3.so +0 -3
- build/torch26-cxx98-cu118-x86_64-linux/flash_mla/_ops.py +3 -3
- build/torch26-cxx98-cu124-x86_64-linux/flash_mla/_flash_mla_d4f4195.abi3.so +3 -0
- build/torch26-cxx98-cu124-x86_64-linux/flash_mla/_flash_mla_yu2nxxotkj3gw.abi3.so +0 -3
- build/torch26-cxx98-cu124-x86_64-linux/flash_mla/_ops.py +3 -3
- build/torch26-cxx98-cu126-x86_64-linux/flash_mla/_flash_mla_7mo4qaqpi6iae.abi3.so +0 -3
- build/torch26-cxx98-cu126-x86_64-linux/flash_mla/_flash_mla_d4f4195.abi3.so +3 -0
- build/torch26-cxx98-cu126-x86_64-linux/flash_mla/_ops.py +3 -3
- build/torch27-cxx11-cu118-x86_64-linux/flash_mla/__init__.py +33 -0
- build/torch27-cxx11-cu118-x86_64-linux/flash_mla/_flash_mla_d4f4195.abi3.so +3 -0
- build/torch27-cxx11-cu118-x86_64-linux/flash_mla/_ops.py +9 -0
- build/torch27-cxx11-cu126-x86_64-linux/flash_mla/__init__.py +33 -0
- build/torch27-cxx11-cu126-x86_64-linux/flash_mla/_flash_mla_d4f4195.abi3.so +3 -0
- build/torch27-cxx11-cu126-x86_64-linux/flash_mla/_ops.py +9 -0
- build/torch27-cxx11-cu128-x86_64-linux/flash_mla/__init__.py +33 -0
- build/torch27-cxx11-cu128-x86_64-linux/flash_mla/_flash_mla_d4f4195.abi3.so +3 -0
- build/torch27-cxx11-cu128-x86_64-linux/flash_mla/_ops.py +9 -0
build/torch25-cxx11-cu118-x86_64-linux/flash_mla/{_flash_mla_ogm5yqv7ipuoy.abi3.so → _flash_mla_d4f4195.abi3.so}
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:a9a2b99b276b5aa714b27d1f54cc5da2d451e65a9ed385c583daf528f2c030a9
+size 2564144
build/torch25-cxx11-cu118-x86_64-linux/flash_mla/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import
-ops = torch.ops.
+from . import _flash_mla_d4f4195
+ops = torch.ops._flash_mla_d4f4195
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"
+    return f"_flash_mla_d4f4195::{op_name}"
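
Every _ops.py touched by this commit is the same shim: it imports the freshly built native extension, whose module name now carries the build hash (_flash_mla_d4f4195) instead of a per-build random suffix, binds its torch.ops namespace to ops, and exposes add_op_namespace_prefix for building fully qualified op names. A minimal usage sketch, not part of the diff — the package name flash_mla and the op get_mla_metadata are taken from the files in this commit, the rest is illustrative:

import torch
from flash_mla._ops import ops, add_op_namespace_prefix

# Fully qualified name of an op registered by the extension,
# i.e. "_flash_mla_d4f4195::get_mla_metadata".
qualified = add_op_namespace_prefix("get_mla_metadata")

# The alias set up in _ops.py points at the same namespace, so the op can be
# called directly once the shared object has been loaded:
# ops.get_mla_metadata(seqlens_k, s_q, h_kv)
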
build/torch25-cxx11-cu121-x86_64-linux/flash_mla/{_flash_mla_tq7soo7mu65hc.abi3.so → _flash_mla_d4f4195.abi3.so}
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:77e241f633fa5b103f379ba6ac58d2cc068e0c3fc4d4f20ac1e1c679fc19614f
+size 2595176
build/torch25-cxx11-cu121-x86_64-linux/flash_mla/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import
-ops = torch.ops.
+from . import _flash_mla_d4f4195
+ops = torch.ops._flash_mla_d4f4195
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"
+    return f"_flash_mla_d4f4195::{op_name}"
build/torch25-cxx11-cu124-x86_64-linux/flash_mla/{_flash_mla_jmusvvowiwydi.abi3.so → _flash_mla_d4f4195.abi3.so}
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:31aa895a57efbd29aeff693b65b02842926bf1788d6f98022c32470a60265f9e
+size 2580248
build/torch25-cxx11-cu124-x86_64-linux/flash_mla/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import
-ops = torch.ops.
+from . import _flash_mla_d4f4195
+ops = torch.ops._flash_mla_d4f4195
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"
+    return f"_flash_mla_d4f4195::{op_name}"
build/torch25-cxx98-cu118-x86_64-linux/flash_mla/{_flash_mla_cqxjqwwlf7suu.abi3.so → _flash_mla_d4f4195.abi3.so}
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:7454c10a3b29128e035bdb3fa18d5fc3706f7970542a0bcb55d9714f0999d42f
+size 2556792
build/torch25-cxx98-cu118-x86_64-linux/flash_mla/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import
-ops = torch.ops.
+from . import _flash_mla_d4f4195
+ops = torch.ops._flash_mla_d4f4195
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"
+    return f"_flash_mla_d4f4195::{op_name}"
build/torch25-cxx98-cu121-x86_64-linux/flash_mla/_flash_mla_d4f4195.abi3.so
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c8cb9402f3091420227cbccf1ec4938a444765e26f5d34c356c76bf7c85630d0
+size 2587896
build/torch25-cxx98-cu121-x86_64-linux/flash_mla/_flash_mla_uqlcnehkqrgzo.abi3.so
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:7d6e13fb4833b751d089ff064509035402190c46e2234dfc7fc9a9f0524df6a1
-size 2587928
build/torch25-cxx98-cu121-x86_64-linux/flash_mla/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import
-ops = torch.ops.
+from . import _flash_mla_d4f4195
+ops = torch.ops._flash_mla_d4f4195
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"
+    return f"_flash_mla_d4f4195::{op_name}"
build/torch25-cxx98-cu124-x86_64-linux/flash_mla/_flash_mla_d4f4195.abi3.so
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bb4be09cbde1979c1aa17e3bc93c1538f129b438d305bee0fe96f3c08efeee04
+size 2572968
build/torch25-cxx98-cu124-x86_64-linux/flash_mla/_flash_mla_iawwb234swad4.abi3.so
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:5c5feacdd7564b407f8ad544d66e9c1bf751a5d72f4142018fe24089663a977c
-size 2573000
build/torch25-cxx98-cu124-x86_64-linux/flash_mla/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import
-ops = torch.ops.
+from . import _flash_mla_d4f4195
+ops = torch.ops._flash_mla_d4f4195
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"
+    return f"_flash_mla_d4f4195::{op_name}"
build/torch26-cxx11-cu118-x86_64-linux/flash_mla/_flash_mla_cb2u2ikyeixke.abi3.so
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:613fbc6946cf9feca9db47dcfdc1a436cf599024d64bf4308da65f3166ee31aa
-size 2564512
build/torch26-cxx11-cu118-x86_64-linux/flash_mla/_flash_mla_d4f4195.abi3.so
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:35c67c788220d8988e47cd4ad976495450b71cd682bd8ab08af3db066d625126
+size 2564496
build/torch26-cxx11-cu118-x86_64-linux/flash_mla/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import
-ops = torch.ops.
+from . import _flash_mla_d4f4195
+ops = torch.ops._flash_mla_d4f4195
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"
+    return f"_flash_mla_d4f4195::{op_name}"
build/torch26-cxx11-cu124-x86_64-linux/flash_mla/_flash_mla_d4f4195.abi3.so
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:069fb3e3a051c91e73390245c7463218829b8decf0f60bd6fc9a0ba8127b5bd2
+size 2580592
build/torch26-cxx11-cu124-x86_64-linux/flash_mla/_flash_mla_zdwdze3ckbyyo.abi3.so
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:f44dd1f22cf30be7062401e4c6b353d3da9d07bb2dbb41b0d81b0733490835ea
-size 2580608
build/torch26-cxx11-cu124-x86_64-linux/flash_mla/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import
-ops = torch.ops.
+from . import _flash_mla_d4f4195
+ops = torch.ops._flash_mla_d4f4195
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"
+    return f"_flash_mla_d4f4195::{op_name}"
build/torch26-cxx11-cu126-x86_64-linux/flash_mla/_flash_mla_d4f4195.abi3.so
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:50fad86fa7bc15096c2a1feadf8091b20e188e32b8c0633423ec26e4e8e8e7ce
+size 2560552
build/torch26-cxx11-cu126-x86_64-linux/flash_mla/_flash_mla_rv7dqsm5x7jvw.abi3.so
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:e3710dfc1a63daecc4e7965555e075e2dfd498c4695b54f10c66ee451931d93a
-size 2560576
build/torch26-cxx11-cu126-x86_64-linux/flash_mla/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import
-ops = torch.ops.
+from . import _flash_mla_d4f4195
+ops = torch.ops._flash_mla_d4f4195
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"
+    return f"_flash_mla_d4f4195::{op_name}"
build/torch26-cxx98-cu118-x86_64-linux/flash_mla/_flash_mla_d4f4195.abi3.so
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ae937ddfbc3e6097b2fdd9197f2ddb5b9f66c65146a4de30ccab59dab6e18dd4
+size 2557136
build/torch26-cxx98-cu118-x86_64-linux/flash_mla/_flash_mla_qgsddurmm4eh6.abi3.so
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:d61051eaa3e3359c8a5fc3b781df27b7aa90c4733f31c90836414fa629a1d1a8
-size 2557160
build/torch26-cxx98-cu118-x86_64-linux/flash_mla/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import
-ops = torch.ops.
+from . import _flash_mla_d4f4195
+ops = torch.ops._flash_mla_d4f4195
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"
+    return f"_flash_mla_d4f4195::{op_name}"
build/torch26-cxx98-cu124-x86_64-linux/flash_mla/_flash_mla_d4f4195.abi3.so
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:503910324475f8bd9dab47687339005f58e5b623bf0c9e4234fabf099c08da33
+size 2573312
build/torch26-cxx98-cu124-x86_64-linux/flash_mla/_flash_mla_yu2nxxotkj3gw.abi3.so
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:c746b130a2057801ed54afd41147a45a330f5867755c7efa0ecf9cf6b0d3a2d0
-size 2573344
build/torch26-cxx98-cu124-x86_64-linux/flash_mla/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import
-ops = torch.ops.
+from . import _flash_mla_d4f4195
+ops = torch.ops._flash_mla_d4f4195
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"
+    return f"_flash_mla_d4f4195::{op_name}"
build/torch26-cxx98-cu126-x86_64-linux/flash_mla/_flash_mla_7mo4qaqpi6iae.abi3.so
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:7372523fd91f328f78058f257d8ceff5f2dfc1b783b1bf4d6a4836c237fe7147
-size 2553304
build/torch26-cxx98-cu126-x86_64-linux/flash_mla/_flash_mla_d4f4195.abi3.so
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2c41fa4058ee2bb5d3d90458a7f92f0ef1c10e8bc854329cf7c208025bb244b2
+size 2553280
build/torch26-cxx98-cu126-x86_64-linux/flash_mla/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import
-ops = torch.ops.
+from . import _flash_mla_d4f4195
+ops = torch.ops._flash_mla_d4f4195
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"
+    return f"_flash_mla_d4f4195::{op_name}"
build/torch27-cxx11-cu118-x86_64-linux/flash_mla/__init__.py
ADDED
@@ -0,0 +1,33 @@
+import torch
+
+from ._ops import ops
+
+
+def get_mla_metadata(seqlens_k: torch.Tensor, s_q: int, h_kv: int):
+    return ops.get_mla_metadata(seqlens_k, s_q, h_kv)
+
+
+def mha_fwd_kvcache_mla(
+    q: torch.Tensor,
+    kcache: torch.Tensor,
+    vcache_: torch.Tensor,
+    head_size_v: int,
+    seqlens_k: torch.Tensor,
+    block_table: torch.Tensor,
+    softmax_scale: float,
+    is_causal_: bool,
+    tile_scheduler_metadata: torch.Tensor,
+    num_splits: torch.Tensor,
+) -> torch.Tensor:
+    return ops.mha_fwd_kvcache_mla(
+        q,
+        kcache,
+        vcache_,
+        head_size_v,
+        seqlens_k,
+        block_table,
+        softmax_scale,
+        is_causal_,
+        tile_scheduler_metadata,
+        num_splits
+    )
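
The __init__.py added for the torch 2.7 builds (repeated verbatim for cu126 and cu128 below) is a thin Python wrapper over the two registered ops. A rough usage sketch, not part of the diff — it assumes the usual split-KV calling convention in which get_mla_metadata returns the (tile_scheduler_metadata, num_splits) pair consumed by the forward call, a paged bf16 KV cache shared between keys and values, and purely illustrative shapes:

import torch
import flash_mla

# Illustrative sizes only: batch, query length, query/KV heads, head dims, paging.
b, s_q, h_q, h_kv = 2, 1, 16, 1
d, dv = 576, 512
block_size, seqlen_k = 64, 1024
blocks_per_seq = seqlen_k // block_size

q = torch.randn(b, s_q, h_q, d, dtype=torch.bfloat16, device="cuda")
kcache = torch.randn(b * blocks_per_seq, block_size, h_kv, d,
                     dtype=torch.bfloat16, device="cuda")
seqlens_k = torch.full((b,), seqlen_k, dtype=torch.int32, device="cuda")
block_table = torch.arange(b * blocks_per_seq, dtype=torch.int32,
                           device="cuda").view(b, blocks_per_seq)

# Tile-scheduler metadata for the split-KV kernel, then the forward pass itself.
tile_scheduler_metadata, num_splits = flash_mla.get_mla_metadata(
    seqlens_k, s_q * h_q // h_kv, h_kv
)
out = flash_mla.mha_fwd_kvcache_mla(
    q,
    kcache,
    kcache,            # value cache; assumed to alias the key cache for MLA
    dv,                # head_size_v
    seqlens_k,
    block_table,
    d ** -0.5,         # softmax_scale
    True,              # is_causal_
    tile_scheduler_metadata,
    num_splits,
)
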
build/torch27-cxx11-cu118-x86_64-linux/flash_mla/_flash_mla_d4f4195.abi3.so
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:59c4034880f4482b06e447a2c4810aaf8009b7d4c86a4fd71356f169df986535
+size 2564632
build/torch27-cxx11-cu118-x86_64-linux/flash_mla/_ops.py
ADDED
@@ -0,0 +1,9 @@
+import torch
+from . import _flash_mla_d4f4195
+ops = torch.ops._flash_mla_d4f4195
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_flash_mla_d4f4195::{op_name}"
build/torch27-cxx11-cu126-x86_64-linux/flash_mla/__init__.py
ADDED
@@ -0,0 +1,33 @@
+import torch
+
+from ._ops import ops
+
+
+def get_mla_metadata(seqlens_k: torch.Tensor, s_q: int, h_kv: int):
+    return ops.get_mla_metadata(seqlens_k, s_q, h_kv)
+
+
+def mha_fwd_kvcache_mla(
+    q: torch.Tensor,
+    kcache: torch.Tensor,
+    vcache_: torch.Tensor,
+    head_size_v: int,
+    seqlens_k: torch.Tensor,
+    block_table: torch.Tensor,
+    softmax_scale: float,
+    is_causal_: bool,
+    tile_scheduler_metadata: torch.Tensor,
+    num_splits: torch.Tensor,
+) -> torch.Tensor:
+    return ops.mha_fwd_kvcache_mla(
+        q,
+        kcache,
+        vcache_,
+        head_size_v,
+        seqlens_k,
+        block_table,
+        softmax_scale,
+        is_causal_,
+        tile_scheduler_metadata,
+        num_splits
+    )
build/torch27-cxx11-cu126-x86_64-linux/flash_mla/_flash_mla_d4f4195.abi3.so
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5db69ef4975e2eee001e6a9b7466c1fe40bc2228ed64eb8c24caf3e0fb6ed0b2
+size 2560584
build/torch27-cxx11-cu126-x86_64-linux/flash_mla/_ops.py
ADDED
@@ -0,0 +1,9 @@
+import torch
+from . import _flash_mla_d4f4195
+ops = torch.ops._flash_mla_d4f4195
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_flash_mla_d4f4195::{op_name}"
build/torch27-cxx11-cu128-x86_64-linux/flash_mla/__init__.py
ADDED
@@ -0,0 +1,33 @@
+import torch
+
+from ._ops import ops
+
+
+def get_mla_metadata(seqlens_k: torch.Tensor, s_q: int, h_kv: int):
+    return ops.get_mla_metadata(seqlens_k, s_q, h_kv)
+
+
+def mha_fwd_kvcache_mla(
+    q: torch.Tensor,
+    kcache: torch.Tensor,
+    vcache_: torch.Tensor,
+    head_size_v: int,
+    seqlens_k: torch.Tensor,
+    block_table: torch.Tensor,
+    softmax_scale: float,
+    is_causal_: bool,
+    tile_scheduler_metadata: torch.Tensor,
+    num_splits: torch.Tensor,
+) -> torch.Tensor:
+    return ops.mha_fwd_kvcache_mla(
+        q,
+        kcache,
+        vcache_,
+        head_size_v,
+        seqlens_k,
+        block_table,
+        softmax_scale,
+        is_causal_,
+        tile_scheduler_metadata,
+        num_splits
+    )
build/torch27-cxx11-cu128-x86_64-linux/flash_mla/_flash_mla_d4f4195.abi3.so
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3fc7eb9341c975d0e313d837977ca3ed13556e6fe63926e0bf117f62499ea052
+size 2615448
build/torch27-cxx11-cu128-x86_64-linux/flash_mla/_ops.py
ADDED
@@ -0,0 +1,9 @@
+import torch
+from . import _flash_mla_d4f4195
+ops = torch.ops._flash_mla_d4f4195
+
+def add_op_namespace_prefix(op_name: str):
+    """
+    Prefix op by namespace.
+    """
+    return f"_flash_mla_d4f4195::{op_name}"