Build (aarch64-linux)
- build/torch26-cxx11-cu126-aarch64-linux/paged_attention/__pycache__/__init__.cpython-312.pyc +0 -0
- build/torch26-cxx11-cu126-aarch64-linux/paged_attention/__pycache__/_custom_ops.cpython-312.pyc +0 -0
- build/torch26-cxx11-cu126-aarch64-linux/paged_attention/__pycache__/_ops.cpython-312.pyc +0 -0
- build/torch26-cxx11-cu126-aarch64-linux/paged_attention/_ops.py +3 -3
- build/torch26-cxx11-cu126-aarch64-linux/paged_attention/{_paged_attention_daf6221.abi3.so → _paged_attention_6677800.abi3.so} +2 -2
- build/torch26-cxx11-cu126-aarch64-linux/paged_attention/platforms.py +31 -1
- build/torch26-cxx98-cu126-aarch64-linux/paged_attention/__pycache__/__init__.cpython-312.pyc +0 -0
- build/torch26-cxx98-cu126-aarch64-linux/paged_attention/__pycache__/_custom_ops.cpython-312.pyc +0 -0
- build/torch26-cxx98-cu126-aarch64-linux/paged_attention/__pycache__/_ops.cpython-312.pyc +0 -0
- build/torch26-cxx98-cu126-aarch64-linux/paged_attention/_ops.py +3 -3
- build/torch26-cxx98-cu126-aarch64-linux/paged_attention/{_paged_attention_daf6221.abi3.so → _paged_attention_6677800.abi3.so} +2 -2
- build/torch26-cxx98-cu126-aarch64-linux/paged_attention/platforms.py +31 -1
- build/torch27-cxx11-cu126-aarch64-linux/paged_attention/__pycache__/__init__.cpython-312.pyc +0 -0
- build/torch27-cxx11-cu126-aarch64-linux/paged_attention/__pycache__/_custom_ops.cpython-312.pyc +0 -0
- build/torch27-cxx11-cu126-aarch64-linux/paged_attention/__pycache__/_ops.cpython-312.pyc +0 -0
- build/torch27-cxx11-cu126-aarch64-linux/paged_attention/_ops.py +3 -3
- build/torch27-cxx11-cu126-aarch64-linux/paged_attention/{_paged_attention_daf6221.abi3.so → _paged_attention_6677800.abi3.so} +2 -2
- build/torch27-cxx11-cu126-aarch64-linux/paged_attention/platforms.py +31 -1
- build/torch27-cxx11-cu128-aarch64-linux/paged_attention/__pycache__/__init__.cpython-312.pyc +0 -0
- build/torch27-cxx11-cu128-aarch64-linux/paged_attention/__pycache__/_custom_ops.cpython-312.pyc +0 -0
- build/torch27-cxx11-cu128-aarch64-linux/paged_attention/__pycache__/_ops.cpython-312.pyc +0 -0
- build/torch27-cxx11-cu128-aarch64-linux/paged_attention/_ops.py +3 -3
- build/torch27-cxx11-cu128-aarch64-linux/paged_attention/{_paged_attention_daf6221.abi3.so → _paged_attention_6677800.abi3.so} +2 -2
- build/torch27-cxx11-cu128-aarch64-linux/paged_attention/platforms.py +31 -1
build/torch26-cxx11-cu126-aarch64-linux/paged_attention/__pycache__/__init__.cpython-312.pyc
ADDED: Binary file (510 Bytes)

build/torch26-cxx11-cu126-aarch64-linux/paged_attention/__pycache__/_custom_ops.cpython-312.pyc
ADDED: Binary file (4.71 kB)

build/torch26-cxx11-cu126-aarch64-linux/paged_attention/__pycache__/_ops.cpython-312.pyc
ADDED: Binary file (548 Bytes)
build/torch26-cxx11-cu126-aarch64-linux/paged_attention/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _paged_attention_daf6221
-ops = torch.ops._paged_attention_daf6221
+from . import _paged_attention_6677800
+ops = torch.ops._paged_attention_6677800

 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_paged_attention_daf6221::{op_name}"
+    return f"_paged_attention_6677800::{op_name}"
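The build hash embedded in the extension module's filename doubles as its torch.ops namespace, so every Python shim is regenerated to point at _paged_attention_6677800. A minimal standalone sketch of the prefix logic (the op name "paged_attention_v1" is illustrative, not taken from this diff):

namespace = "_paged_attention_6677800"

def add_op_namespace_prefix(op_name: str) -> str:
    # Mirrors the updated _ops.py: qualify an op name with the build namespace.
    return f"{namespace}::{op_name}"

print(add_op_namespace_prefix("paged_attention_v1"))
# -> _paged_attention_6677800::paged_attention_v1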
build/torch26-cxx11-cu126-aarch64-linux/paged_attention/{_paged_attention_daf6221.abi3.so → _paged_attention_6677800.abi3.so}
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:4b860f51a86eb13c6c7a6e74f28c59abf0b73245f241744766029fbda53cba87
+size 88169704
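Each .abi3.so is tracked with Git LFS, so the diff above is over the pointer file (spec version, SHA-256 oid, byte size) rather than the binary itself. A minimal sketch for checking a fetched binary against its pointer fields, using the values from this diff (the local path is hypothetical):

import hashlib

EXPECTED_OID = "4b860f51a86eb13c6c7a6e74f28c59abf0b73245f241744766029fbda53cba87"
EXPECTED_SIZE = 88169704

def verify_lfs_blob(path: str, oid: str, size: int) -> bool:
    # Stream the file so large binaries are not loaded into memory at once.
    digest = hashlib.sha256()
    total = 0
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
            total += len(chunk)
    return total == size and digest.hexdigest() == oid

# Hypothetical local path to the fetched shared object:
# verify_lfs_blob("_paged_attention_6677800.abi3.so", EXPECTED_OID, EXPECTED_SIZE)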
build/torch26-cxx11-cu126-aarch64-linux/paged_attention/platforms.py
CHANGED
@@ -8,6 +8,7 @@ import numpy as np
 import torch

 IS_ROCM = torch.version.hip is not None
+IS_MPS = torch.backends.mps.is_available()


 class Platform(ABC):
@@ -32,6 +33,9 @@ class Platform(ABC):
     @abstractmethod
     def is_rocm(self) -> bool: ...

+    @abstractmethod
+    def is_mps(self) -> bool: ...
+

 class CudaPlatform(Platform):
     @classmethod
@@ -45,6 +49,9 @@ class CudaPlatform(Platform):
     def is_rocm(self) -> bool:
         return False

+    def is_mps(self) -> bool:
+        return False
+

 class RocmPlatform(Platform):
     @classmethod
@@ -58,5 +65,28 @@ class RocmPlatform(Platform):
     def is_rocm(self) -> bool:
         return True

+    def is_mps(self) -> bool:
+        return False
+
+
+class MpsPlatform(Platform):
+    @classmethod
+    @lru_cache(maxsize=8)
+    def get_device_name(cls, device_id: int = 0) -> str:
+        return torch.cuda.get_device_name(device_id)
+
+    def is_cuda(self) -> bool:
+        return False
+
+    def is_rocm(self) -> bool:
+        return False
+
+    def is_mps(self) -> bool:
+        return True

-current_platform =
+current_platform = (
+    RocmPlatform() if IS_ROCM else
+    MpsPlatform() if IS_MPS else
+    CudaPlatform() if torch.cuda.is_available() else
+    None
+)
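With this change, current_platform resolves in priority order: ROCm, then MPS, then CUDA, falling back to None when no accelerator is present. A minimal consumer sketch (the helper name pick_backend is hypothetical, assuming the package layout above):

from paged_attention.platforms import current_platform

def pick_backend() -> str:
    # Hypothetical dispatch helper; mirrors the priority order above.
    if current_platform is None:
        return "cpu"
    if current_platform.is_rocm():
        return "rocm"
    if current_platform.is_mps():
        return "mps"
    return "cuda"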
build/torch26-cxx98-cu126-aarch64-linux/paged_attention/__pycache__/__init__.cpython-312.pyc
ADDED: Binary file (510 Bytes)

build/torch26-cxx98-cu126-aarch64-linux/paged_attention/__pycache__/_custom_ops.cpython-312.pyc
ADDED: Binary file (4.71 kB)

build/torch26-cxx98-cu126-aarch64-linux/paged_attention/__pycache__/_ops.cpython-312.pyc
ADDED: Binary file (548 Bytes)
build/torch26-cxx98-cu126-aarch64-linux/paged_attention/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _paged_attention_daf6221
-ops = torch.ops._paged_attention_daf6221
+from . import _paged_attention_6677800
+ops = torch.ops._paged_attention_6677800

 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_paged_attention_daf6221::{op_name}"
+    return f"_paged_attention_6677800::{op_name}"
build/torch26-cxx98-cu126-aarch64-linux/paged_attention/{_paged_attention_daf6221.abi3.so → _paged_attention_6677800.abi3.so}
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:385c8fd6816793573ac802c40e1be99cc5703eebb4df05330ce57b2216da8836
+size 88162432
build/torch26-cxx98-cu126-aarch64-linux/paged_attention/platforms.py
CHANGED
@@ -8,6 +8,7 @@ import numpy as np
 import torch

 IS_ROCM = torch.version.hip is not None
+IS_MPS = torch.backends.mps.is_available()


 class Platform(ABC):
@@ -32,6 +33,9 @@ class Platform(ABC):
     @abstractmethod
     def is_rocm(self) -> bool: ...

+    @abstractmethod
+    def is_mps(self) -> bool: ...
+

 class CudaPlatform(Platform):
     @classmethod
@@ -45,6 +49,9 @@ class CudaPlatform(Platform):
     def is_rocm(self) -> bool:
         return False

+    def is_mps(self) -> bool:
+        return False
+

 class RocmPlatform(Platform):
     @classmethod
@@ -58,5 +65,28 @@ class RocmPlatform(Platform):
     def is_rocm(self) -> bool:
         return True

+    def is_mps(self) -> bool:
+        return False
+
+
+class MpsPlatform(Platform):
+    @classmethod
+    @lru_cache(maxsize=8)
+    def get_device_name(cls, device_id: int = 0) -> str:
+        return torch.cuda.get_device_name(device_id)
+
+    def is_cuda(self) -> bool:
+        return False
+
+    def is_rocm(self) -> bool:
+        return False
+
+    def is_mps(self) -> bool:
+        return True

-current_platform =
+current_platform = (
+    RocmPlatform() if IS_ROCM else
+    MpsPlatform() if IS_MPS else
+    CudaPlatform() if torch.cuda.is_available() else
+    None
+)
build/torch27-cxx11-cu126-aarch64-linux/paged_attention/__pycache__/__init__.cpython-312.pyc
ADDED: Binary file (510 Bytes)

build/torch27-cxx11-cu126-aarch64-linux/paged_attention/__pycache__/_custom_ops.cpython-312.pyc
ADDED: Binary file (4.71 kB)

build/torch27-cxx11-cu126-aarch64-linux/paged_attention/__pycache__/_ops.cpython-312.pyc
ADDED: Binary file (548 Bytes)
build/torch27-cxx11-cu126-aarch64-linux/paged_attention/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _paged_attention_daf6221
-ops = torch.ops._paged_attention_daf6221
+from . import _paged_attention_6677800
+ops = torch.ops._paged_attention_6677800

 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_paged_attention_daf6221::{op_name}"
+    return f"_paged_attention_6677800::{op_name}"
build/torch27-cxx11-cu126-aarch64-linux/paged_attention/{_paged_attention_daf6221.abi3.so → _paged_attention_6677800.abi3.so}
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:2639286d960d9992c40b85c911773825374d8bb218a82eb84cdd4ac41342cb4c
+size 88169904
build/torch27-cxx11-cu126-aarch64-linux/paged_attention/platforms.py
CHANGED
@@ -8,6 +8,7 @@ import numpy as np
 import torch

 IS_ROCM = torch.version.hip is not None
+IS_MPS = torch.backends.mps.is_available()


 class Platform(ABC):
@@ -32,6 +33,9 @@ class Platform(ABC):
     @abstractmethod
     def is_rocm(self) -> bool: ...

+    @abstractmethod
+    def is_mps(self) -> bool: ...
+

 class CudaPlatform(Platform):
     @classmethod
@@ -45,6 +49,9 @@ class CudaPlatform(Platform):
     def is_rocm(self) -> bool:
         return False

+    def is_mps(self) -> bool:
+        return False
+

 class RocmPlatform(Platform):
     @classmethod
@@ -58,5 +65,28 @@ class RocmPlatform(Platform):
     def is_rocm(self) -> bool:
         return True

+    def is_mps(self) -> bool:
+        return False
+
+
+class MpsPlatform(Platform):
+    @classmethod
+    @lru_cache(maxsize=8)
+    def get_device_name(cls, device_id: int = 0) -> str:
+        return torch.cuda.get_device_name(device_id)
+
+    def is_cuda(self) -> bool:
+        return False
+
+    def is_rocm(self) -> bool:
+        return False
+
+    def is_mps(self) -> bool:
+        return True

-current_platform =
+current_platform = (
+    RocmPlatform() if IS_ROCM else
+    MpsPlatform() if IS_MPS else
+    CudaPlatform() if torch.cuda.is_available() else
+    None
+)
build/torch27-cxx11-cu128-aarch64-linux/paged_attention/__pycache__/__init__.cpython-312.pyc
ADDED: Binary file (510 Bytes)

build/torch27-cxx11-cu128-aarch64-linux/paged_attention/__pycache__/_custom_ops.cpython-312.pyc
ADDED: Binary file (4.71 kB)

build/torch27-cxx11-cu128-aarch64-linux/paged_attention/__pycache__/_ops.cpython-312.pyc
ADDED: Binary file (548 Bytes)
build/torch27-cxx11-cu128-aarch64-linux/paged_attention/_ops.py
CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _paged_attention_daf6221
-ops = torch.ops._paged_attention_daf6221
+from . import _paged_attention_6677800
+ops = torch.ops._paged_attention_6677800

 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_paged_attention_daf6221::{op_name}"
+    return f"_paged_attention_6677800::{op_name}"
build/torch27-cxx11-cu128-aarch64-linux/paged_attention/{_paged_attention_daf6221.abi3.so → _paged_attention_6677800.abi3.so}
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:7bb98e687cc11950f2103990b777e6699c4a7285a53092def0063a9684fa951e
+size 120216032
build/torch27-cxx11-cu128-aarch64-linux/paged_attention/platforms.py
CHANGED
@@ -8,6 +8,7 @@ import numpy as np
 import torch

 IS_ROCM = torch.version.hip is not None
+IS_MPS = torch.backends.mps.is_available()


 class Platform(ABC):
@@ -32,6 +33,9 @@ class Platform(ABC):
     @abstractmethod
     def is_rocm(self) -> bool: ...

+    @abstractmethod
+    def is_mps(self) -> bool: ...
+

 class CudaPlatform(Platform):
     @classmethod
@@ -45,6 +49,9 @@ class CudaPlatform(Platform):
     def is_rocm(self) -> bool:
         return False

+    def is_mps(self) -> bool:
+        return False
+

 class RocmPlatform(Platform):
     @classmethod
@@ -58,5 +65,28 @@ class RocmPlatform(Platform):
     def is_rocm(self) -> bool:
         return True

+    def is_mps(self) -> bool:
+        return False
+
+
+class MpsPlatform(Platform):
+    @classmethod
+    @lru_cache(maxsize=8)
+    def get_device_name(cls, device_id: int = 0) -> str:
+        return torch.cuda.get_device_name(device_id)
+
+    def is_cuda(self) -> bool:
+        return False
+
+    def is_rocm(self) -> bool:
+        return False
+
+    def is_mps(self) -> bool:
+        return True

-current_platform =
+current_platform = (
+    RocmPlatform() if IS_ROCM else
+    MpsPlatform() if IS_MPS else
+    CudaPlatform() if torch.cuda.is_available() else
+    None
+)