danieldk (HF Staff) committed
Commit e6ce28c · 1 Parent(s): 6677800

Build (aarch64-linux)

Files changed (24)
  1. build/torch26-cxx11-cu126-aarch64-linux/paged_attention/__pycache__/__init__.cpython-312.pyc +0 -0
  2. build/torch26-cxx11-cu126-aarch64-linux/paged_attention/__pycache__/_custom_ops.cpython-312.pyc +0 -0
  3. build/torch26-cxx11-cu126-aarch64-linux/paged_attention/__pycache__/_ops.cpython-312.pyc +0 -0
  4. build/torch26-cxx11-cu126-aarch64-linux/paged_attention/_ops.py +3 -3
  5. build/torch26-cxx11-cu126-aarch64-linux/paged_attention/{_paged_attention_daf6221.abi3.so → _paged_attention_6677800.abi3.so} +2 -2
  6. build/torch26-cxx11-cu126-aarch64-linux/paged_attention/platforms.py +31 -1
  7. build/torch26-cxx98-cu126-aarch64-linux/paged_attention/__pycache__/__init__.cpython-312.pyc +0 -0
  8. build/torch26-cxx98-cu126-aarch64-linux/paged_attention/__pycache__/_custom_ops.cpython-312.pyc +0 -0
  9. build/torch26-cxx98-cu126-aarch64-linux/paged_attention/__pycache__/_ops.cpython-312.pyc +0 -0
  10. build/torch26-cxx98-cu126-aarch64-linux/paged_attention/_ops.py +3 -3
  11. build/torch26-cxx98-cu126-aarch64-linux/paged_attention/{_paged_attention_daf6221.abi3.so → _paged_attention_6677800.abi3.so} +2 -2
  12. build/torch26-cxx98-cu126-aarch64-linux/paged_attention/platforms.py +31 -1
  13. build/torch27-cxx11-cu126-aarch64-linux/paged_attention/__pycache__/__init__.cpython-312.pyc +0 -0
  14. build/torch27-cxx11-cu126-aarch64-linux/paged_attention/__pycache__/_custom_ops.cpython-312.pyc +0 -0
  15. build/torch27-cxx11-cu126-aarch64-linux/paged_attention/__pycache__/_ops.cpython-312.pyc +0 -0
  16. build/torch27-cxx11-cu126-aarch64-linux/paged_attention/_ops.py +3 -3
  17. build/torch27-cxx11-cu126-aarch64-linux/paged_attention/{_paged_attention_daf6221.abi3.so → _paged_attention_6677800.abi3.so} +2 -2
  18. build/torch27-cxx11-cu126-aarch64-linux/paged_attention/platforms.py +31 -1
  19. build/torch27-cxx11-cu128-aarch64-linux/paged_attention/__pycache__/__init__.cpython-312.pyc +0 -0
  20. build/torch27-cxx11-cu128-aarch64-linux/paged_attention/__pycache__/_custom_ops.cpython-312.pyc +0 -0
  21. build/torch27-cxx11-cu128-aarch64-linux/paged_attention/__pycache__/_ops.cpython-312.pyc +0 -0
  22. build/torch27-cxx11-cu128-aarch64-linux/paged_attention/_ops.py +3 -3
  23. build/torch27-cxx11-cu128-aarch64-linux/paged_attention/{_paged_attention_daf6221.abi3.so → _paged_attention_6677800.abi3.so} +2 -2
  24. build/torch27-cxx11-cu128-aarch64-linux/paged_attention/platforms.py +31 -1
build/torch26-cxx11-cu126-aarch64-linux/paged_attention/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (510 Bytes).
 
build/torch26-cxx11-cu126-aarch64-linux/paged_attention/__pycache__/_custom_ops.cpython-312.pyc ADDED
Binary file (4.71 kB).
 
build/torch26-cxx11-cu126-aarch64-linux/paged_attention/__pycache__/_ops.cpython-312.pyc ADDED
Binary file (548 Bytes).
 
build/torch26-cxx11-cu126-aarch64-linux/paged_attention/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _paged_attention_daf6221
-ops = torch.ops._paged_attention_daf6221
+from . import _paged_attention_6677800
+ops = torch.ops._paged_attention_6677800

 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_paged_attention_daf6221::{op_name}"
+    return f"_paged_attention_6677800::{op_name}"
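For orientation: `_ops.py` is a thin shim that imports the versioned native extension and exposes its registered ops; the rename from `_paged_attention_daf6221` to `_paged_attention_6677800` keeps the op namespace in sync with the new build hash. A minimal usage sketch, assuming the wheel is installed; the op name `paged_attention_v1` is illustrative, not taken from this diff:

from paged_attention._ops import ops, add_op_namespace_prefix

# Build the fully qualified name used for torch.library registrations/lookups.
qualified = add_op_namespace_prefix("paged_attention_v1")  # hypothetical op name
print(qualified)  # -> "_paged_attention_6677800::paged_attention_v1"

# Ops exported by the shared library are reachable as attributes of `ops`:
# out = ops.paged_attention_v1(...)  # arguments omitted; defined by the kernel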
build/torch26-cxx11-cu126-aarch64-linux/paged_attention/{_paged_attention_daf6221.abi3.so → _paged_attention_6677800.abi3.so} RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ac7b8c17999df02a6869772e37713c22d3a765747daa576e64a4ff84a246dfaa
-size 87576024
+oid sha256:4b860f51a86eb13c6c7a6e74f28c59abf0b73245f241744766029fbda53cba87
+size 88169704
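Each `.abi3.so` artifact is stored through Git LFS, so the rename above only rewrites the three-line pointer file; the binary itself is content-addressed by the `oid sha256:` digest. A quick integrity check after `git lfs pull`, as a sketch (the local filename comes from this diff; everything else is Python standard library):

import hashlib

def lfs_oid(path: str) -> str:
    # Stream in 1 MiB chunks so an ~88 MB shared object never sits in RAM twice.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

# Expected to equal the pointer's oid for this build variant:
# lfs_oid("_paged_attention_6677800.abi3.so")
# == "4b860f51a86eb13c6c7a6e74f28c59abf0b73245f241744766029fbda53cba87"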
build/torch26-cxx11-cu126-aarch64-linux/paged_attention/platforms.py CHANGED
@@ -8,6 +8,7 @@ import numpy as np
 import torch

 IS_ROCM = torch.version.hip is not None
+IS_MPS = torch.backends.mps.is_available()


 class Platform(ABC):
@@ -32,6 +33,9 @@ class Platform(ABC):
     @abstractmethod
     def is_rocm(self) -> bool: ...

+    @abstractmethod
+    def is_mps(self) -> bool: ...
+

 class CudaPlatform(Platform):
     @classmethod
@@ -45,6 +49,9 @@ class CudaPlatform(Platform):
     def is_rocm(self) -> bool:
         return False

+    def is_mps(self) -> bool:
+        return False
+

 class RocmPlatform(Platform):
     @classmethod
@@ -58,5 +65,28 @@ class RocmPlatform(Platform):
     def is_rocm(self) -> bool:
         return True

+    def is_mps(self) -> bool:
+        return False
+
+
+class MpsPlatform(Platform):
+    @classmethod
+    @lru_cache(maxsize=8)
+    def get_device_name(cls, device_id: int = 0) -> str:
+        return torch.cuda.get_device_name(device_id)
+
+    def is_cuda(self) -> bool:
+        return False
+
+    def is_rocm(self) -> bool:
+        return False
+
+    def is_mps(self) -> bool:
+        return True

-current_platform = RocmPlatform() if IS_ROCM else CudaPlatform()
+current_platform = (
+    RocmPlatform() if IS_ROCM else
+    MpsPlatform() if IS_MPS else
+    CudaPlatform() if torch.cuda.is_available() else
+    None
+)
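Functionally, this change adds MPS detection next to ROCm and makes `current_platform` nullable: instead of unconditionally falling back to `CudaPlatform`, it is now `None` when no ROCm, MPS, or CUDA device is present, so callers must guard. A minimal consumer sketch (the dispatch mapping below is illustrative, not part of this repo):

from paged_attention.platforms import current_platform

if current_platform is None:
    # New failure mode introduced by this commit: no supported accelerator.
    raise RuntimeError("no ROCm, MPS, or CUDA device available")

if current_platform.is_rocm():
    backend = "rocm"
elif current_platform.is_mps():
    backend = "mps"
else:
    backend = "cuda"

print(f"dispatching paged attention on {backend}")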
build/torch26-cxx98-cu126-aarch64-linux/paged_attention/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (510 Bytes).
 
build/torch26-cxx98-cu126-aarch64-linux/paged_attention/__pycache__/_custom_ops.cpython-312.pyc ADDED
Binary file (4.71 kB).
 
build/torch26-cxx98-cu126-aarch64-linux/paged_attention/__pycache__/_ops.cpython-312.pyc ADDED
Binary file (548 Bytes).
 
build/torch26-cxx98-cu126-aarch64-linux/paged_attention/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _paged_attention_daf6221
-ops = torch.ops._paged_attention_daf6221
+from . import _paged_attention_6677800
+ops = torch.ops._paged_attention_6677800

 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_paged_attention_daf6221::{op_name}"
+    return f"_paged_attention_6677800::{op_name}"
build/torch26-cxx98-cu126-aarch64-linux/paged_attention/{_paged_attention_daf6221.abi3.so → _paged_attention_6677800.abi3.so} RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:854c53fdffb1ee8f889cbbb78127cf92a843c55d00228b8aadcd48050d700af0
-size 87568816
+oid sha256:385c8fd6816793573ac802c40e1be99cc5703eebb4df05330ce57b2216da8836
+size 88162432
build/torch26-cxx98-cu126-aarch64-linux/paged_attention/platforms.py CHANGED
@@ -8,6 +8,7 @@ import numpy as np
 import torch

 IS_ROCM = torch.version.hip is not None
+IS_MPS = torch.backends.mps.is_available()


 class Platform(ABC):
@@ -32,6 +33,9 @@ class Platform(ABC):
     @abstractmethod
     def is_rocm(self) -> bool: ...

+    @abstractmethod
+    def is_mps(self) -> bool: ...
+

 class CudaPlatform(Platform):
     @classmethod
@@ -45,6 +49,9 @@ class CudaPlatform(Platform):
     def is_rocm(self) -> bool:
         return False

+    def is_mps(self) -> bool:
+        return False
+

 class RocmPlatform(Platform):
     @classmethod
@@ -58,5 +65,28 @@ class RocmPlatform(Platform):
     def is_rocm(self) -> bool:
         return True

+    def is_mps(self) -> bool:
+        return False
+
+
+class MpsPlatform(Platform):
+    @classmethod
+    @lru_cache(maxsize=8)
+    def get_device_name(cls, device_id: int = 0) -> str:
+        return torch.cuda.get_device_name(device_id)
+
+    def is_cuda(self) -> bool:
+        return False
+
+    def is_rocm(self) -> bool:
+        return False
+
+    def is_mps(self) -> bool:
+        return True

-current_platform = RocmPlatform() if IS_ROCM else CudaPlatform()
+current_platform = (
+    RocmPlatform() if IS_ROCM else
+    MpsPlatform() if IS_MPS else
+    CudaPlatform() if torch.cuda.is_available() else
+    None
+)
build/torch27-cxx11-cu126-aarch64-linux/paged_attention/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (510 Bytes).
 
build/torch27-cxx11-cu126-aarch64-linux/paged_attention/__pycache__/_custom_ops.cpython-312.pyc ADDED
Binary file (4.71 kB).
 
build/torch27-cxx11-cu126-aarch64-linux/paged_attention/__pycache__/_ops.cpython-312.pyc ADDED
Binary file (548 Bytes).
 
build/torch27-cxx11-cu126-aarch64-linux/paged_attention/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _paged_attention_daf6221
-ops = torch.ops._paged_attention_daf6221
+from . import _paged_attention_6677800
+ops = torch.ops._paged_attention_6677800

 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_paged_attention_daf6221::{op_name}"
+    return f"_paged_attention_6677800::{op_name}"
build/torch27-cxx11-cu126-aarch64-linux/paged_attention/{_paged_attention_daf6221.abi3.so → _paged_attention_6677800.abi3.so} RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:71c360e1d9268a0794c51393fd1d943472952abb5748af37a243a3f06155fb8c
-size 87576232
+oid sha256:2639286d960d9992c40b85c911773825374d8bb218a82eb84cdd4ac41342cb4c
+size 88169904
build/torch27-cxx11-cu126-aarch64-linux/paged_attention/platforms.py CHANGED
@@ -8,6 +8,7 @@ import numpy as np
 import torch

 IS_ROCM = torch.version.hip is not None
+IS_MPS = torch.backends.mps.is_available()


 class Platform(ABC):
@@ -32,6 +33,9 @@ class Platform(ABC):
     @abstractmethod
     def is_rocm(self) -> bool: ...

+    @abstractmethod
+    def is_mps(self) -> bool: ...
+

 class CudaPlatform(Platform):
     @classmethod
@@ -45,6 +49,9 @@ class CudaPlatform(Platform):
     def is_rocm(self) -> bool:
         return False

+    def is_mps(self) -> bool:
+        return False
+

 class RocmPlatform(Platform):
     @classmethod
@@ -58,5 +65,28 @@ class RocmPlatform(Platform):
     def is_rocm(self) -> bool:
         return True

+    def is_mps(self) -> bool:
+        return False
+
+
+class MpsPlatform(Platform):
+    @classmethod
+    @lru_cache(maxsize=8)
+    def get_device_name(cls, device_id: int = 0) -> str:
+        return torch.cuda.get_device_name(device_id)
+
+    def is_cuda(self) -> bool:
+        return False
+
+    def is_rocm(self) -> bool:
+        return False
+
+    def is_mps(self) -> bool:
+        return True

-current_platform = RocmPlatform() if IS_ROCM else CudaPlatform()
+current_platform = (
+    RocmPlatform() if IS_ROCM else
+    MpsPlatform() if IS_MPS else
+    CudaPlatform() if torch.cuda.is_available() else
+    None
+)
build/torch27-cxx11-cu128-aarch64-linux/paged_attention/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (510 Bytes).
 
build/torch27-cxx11-cu128-aarch64-linux/paged_attention/__pycache__/_custom_ops.cpython-312.pyc ADDED
Binary file (4.71 kB).
 
build/torch27-cxx11-cu128-aarch64-linux/paged_attention/__pycache__/_ops.cpython-312.pyc ADDED
Binary file (548 Bytes).
 
build/torch27-cxx11-cu128-aarch64-linux/paged_attention/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _paged_attention_daf6221
-ops = torch.ops._paged_attention_daf6221
+from . import _paged_attention_6677800
+ops = torch.ops._paged_attention_6677800

 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_paged_attention_daf6221::{op_name}"
+    return f"_paged_attention_6677800::{op_name}"
build/torch27-cxx11-cu128-aarch64-linux/paged_attention/{_paged_attention_daf6221.abi3.so → _paged_attention_6677800.abi3.so} RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d39b07d716e2499580fdf6edf14af97abaef398453e77fd7d360e6d6bcc12df1
-size 119164016
+oid sha256:7bb98e687cc11950f2103990b777e6699c4a7285a53092def0063a9684fa951e
+size 120216032
build/torch27-cxx11-cu128-aarch64-linux/paged_attention/platforms.py CHANGED
@@ -8,6 +8,7 @@ import numpy as np
 import torch

 IS_ROCM = torch.version.hip is not None
+IS_MPS = torch.backends.mps.is_available()


 class Platform(ABC):
@@ -32,6 +33,9 @@ class Platform(ABC):
     @abstractmethod
     def is_rocm(self) -> bool: ...

+    @abstractmethod
+    def is_mps(self) -> bool: ...
+

 class CudaPlatform(Platform):
     @classmethod
@@ -45,6 +49,9 @@ class CudaPlatform(Platform):
     def is_rocm(self) -> bool:
         return False

+    def is_mps(self) -> bool:
+        return False
+

 class RocmPlatform(Platform):
     @classmethod
@@ -58,5 +65,28 @@ class RocmPlatform(Platform):
     def is_rocm(self) -> bool:
         return True

+    def is_mps(self) -> bool:
+        return False
+
+
+class MpsPlatform(Platform):
+    @classmethod
+    @lru_cache(maxsize=8)
+    def get_device_name(cls, device_id: int = 0) -> str:
+        return torch.cuda.get_device_name(device_id)
+
+    def is_cuda(self) -> bool:
+        return False
+
+    def is_rocm(self) -> bool:
+        return False
+
+    def is_mps(self) -> bool:
+        return True

-current_platform = RocmPlatform() if IS_ROCM else CudaPlatform()
+current_platform = (
+    RocmPlatform() if IS_ROCM else
+    MpsPlatform() if IS_MPS else
+    CudaPlatform() if torch.cuda.is_available() else
+    None
+)