Build
- build/torch24-cxx11-cu118-x86_64-linux/activation/__init__.py +35 -29
- build/torch24-cxx11-cu118-x86_64-linux/activation/_activation_0_0_1.abi3.so +2 -2
- build/torch24-cxx11-cu121-x86_64-linux/activation/__init__.py +35 -29
- build/torch24-cxx11-cu121-x86_64-linux/activation/_activation_0_0_1.abi3.so +1 -1
- build/torch24-cxx11-cu124-x86_64-linux/activation/__init__.py +35 -29
- build/torch24-cxx11-cu124-x86_64-linux/activation/_activation_0_0_1.abi3.so +1 -1
- build/torch24-cxx98-cu118-x86_64-linux/activation/__init__.py +35 -29
- build/torch24-cxx98-cu118-x86_64-linux/activation/_activation_0_0_1.abi3.so +2 -2
- build/torch24-cxx98-cu121-x86_64-linux/activation/__init__.py +35 -29
- build/torch24-cxx98-cu121-x86_64-linux/activation/_activation_0_0_1.abi3.so +1 -1
- build/torch24-cxx98-cu124-x86_64-linux/activation/__init__.py +35 -29
- build/torch24-cxx98-cu124-x86_64-linux/activation/_activation_0_0_1.abi3.so +1 -1
- build/torch25-cxx11-cu118-x86_64-linux/activation/__init__.py +35 -29
- build/torch25-cxx11-cu118-x86_64-linux/activation/_activation_0_0_1.abi3.so +2 -2
- build/torch25-cxx11-cu121-x86_64-linux/activation/__init__.py +35 -29
- build/torch25-cxx11-cu121-x86_64-linux/activation/_activation_0_0_1.abi3.so +1 -1
- build/torch25-cxx11-cu124-x86_64-linux/activation/__init__.py +35 -29
- build/torch25-cxx11-cu124-x86_64-linux/activation/_activation_0_0_1.abi3.so +1 -1
- build/torch25-cxx98-cu118-x86_64-linux/activation/__init__.py +35 -29
- build/torch25-cxx98-cu118-x86_64-linux/activation/_activation_0_0_1.abi3.so +2 -2
- build/torch25-cxx98-cu121-x86_64-linux/activation/__init__.py +35 -29
- build/torch25-cxx98-cu121-x86_64-linux/activation/_activation_0_0_1.abi3.so +1 -1
- build/torch25-cxx98-cu124-x86_64-linux/activation/__init__.py +35 -29
- build/torch25-cxx98-cu124-x86_64-linux/activation/_activation_0_0_1.abi3.so +1 -1
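Each directory name above encodes the build-matrix entry it was produced for: PyTorch minor version (torch24/torch25), C++ ABI (cxx11/cxx98), CUDA toolkit (cu118/cu121/cu124), and platform. As a rough, hypothetical sketch (this helper is not part of the repository), the variant matching the running interpreter could be derived like so, assuming a CUDA-enabled torch build:

import torch

def build_variant() -> str:
    # Hypothetical helper: derive a name such as "torch25-cxx11-cu124-x86_64-linux"
    # from the running torch build (assumes torch was built with CUDA support).
    major_minor = "".join(torch.__version__.split("+")[0].split(".")[:2])
    abi = "cxx11" if torch.compiled_with_cxx11_abi() else "cxx98"
    cuda = "cu" + torch.version.cuda.replace(".", "")
    return f"torch{major_minor}-{abi}-{cuda}-x86_64-linux"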
build/torch24-cxx11-cu118-x86_64-linux/activation/__init__.py
CHANGED
@@ -6,36 +6,42 @@ except ImportError as e:
 # Fallback for local development.
 try:
     import _activation
+
     ops = torch.ops._activition
 except ImportError:
     raise e
-
-
-def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.silu_and_mul(out, x)
[old lines 16-40 (removed) are not rendered in the source view]
+
+
+def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.silu_and_mul(out, x)
+    return out
+
+
+def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_and_mul(out, x)
+    return out
+
+
+def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_tanh_and_mul(out, x)
+    return out
+
+
+def fatrelu_and_mul(out: torch.Tensor, x: torch.Tensor, threshold: float = 0.0) -> None:
+    ops.fatrelu_and_mul(out, x, threshold)
+    return out
+
+
+def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_fast(out, x)
+    return out
+
+
+def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_new(out, x)
+    return out
+
+
+def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
     ops.gelu_quick(out, x)
+    return out
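For orientation, a minimal usage sketch of the updated module follows. It assumes the built package is importable as activation, that a CUDA device is available, and that the gated activations follow the usual convention of packing two halves along the last input dimension (an assumption not stated in this diff); after this change the helpers also return the out tensor they fill.

import torch
import activation  # the package defined by the __init__.py shown above

# Assumed shapes: x packs two halves of size d along its last dimension,
# and out receives the gated result of size d.
d = 128
x = torch.randn(4, 2 * d, device="cuda", dtype=torch.float16)
out = torch.empty(4, d, device="cuda", dtype=torch.float16)

activation.silu_and_mul(out, x)           # fills `out` in place
y = activation.gelu_tanh_and_mul(out, x)  # with this commit, also returns `out`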
build/torch24-cxx11-cu118-x86_64-linux/activation/_activation_0_0_1.abi3.so
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:d6dcb1ada7977e1a47ed9d2947c41daf7db9074a9947f36487de13a99e0840ba
+size 2393088
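The .abi3.so entries in this commit are Git LFS pointer files rather than the binaries themselves; only the sha256 oid (and, for the cu118 variants, the recorded size line) changes, and the previous oid values are truncated in this rendering. As an illustrative sketch only (the helper below is hypothetical, not part of the repository), such a pointer can be read like this:

from pathlib import Path

def read_lfs_pointer(path: str) -> dict:
    # An LFS pointer is a tiny text file with "version", "oid" and "size" fields.
    fields = {}
    for line in Path(path).read_text().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

info = read_lfs_pointer(
    "build/torch24-cxx11-cu118-x86_64-linux/activation/_activation_0_0_1.abi3.so"
)
print(info["oid"], info["size"])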
build/torch24-cxx11-cu121-x86_64-linux/activation/__init__.py
CHANGED
Same diff as build/torch24-cxx11-cu118-x86_64-linux/activation/__init__.py above; the rendered change is identical for this build variant.
build/torch24-cxx11-cu121-x86_64-linux/activation/_activation_0_0_1.abi3.so
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:f7383c424c3dadb9cf2c32d7b19f28bddb6e3dd31052b3cb2e324d05f5592c84
 size 2393472
build/torch24-cxx11-cu124-x86_64-linux/activation/__init__.py
CHANGED
Same diff as build/torch24-cxx11-cu118-x86_64-linux/activation/__init__.py above; the rendered change is identical for this build variant.
build/torch24-cxx11-cu124-x86_64-linux/activation/_activation_0_0_1.abi3.so
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:1ae1b1f6a69835b66aef515a31ef7cc35c976090e6932b0dd747911e4453c23c
 size 2277656
build/torch24-cxx98-cu118-x86_64-linux/activation/__init__.py
CHANGED
Same diff as build/torch24-cxx11-cu118-x86_64-linux/activation/__init__.py above; the rendered change is identical for this build variant.
build/torch24-cxx98-cu118-x86_64-linux/activation/_activation_0_0_1.abi3.so
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:f7d7a781448907bebe4fa8c73e45d751a224a8849de099338904b32e70779039
+size 2377704
build/torch24-cxx98-cu121-x86_64-linux/activation/__init__.py
CHANGED
Same diff as build/torch24-cxx11-cu118-x86_64-linux/activation/__init__.py above; the rendered change is identical for this build variant.
build/torch24-cxx98-cu121-x86_64-linux/activation/_activation_0_0_1.abi3.so
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:87f4213948d1b1083a39b9136f7ff08814482cf83ed100b132652963702e22b3
 size 2373608
build/torch24-cxx98-cu124-x86_64-linux/activation/__init__.py
CHANGED
Same diff as build/torch24-cxx11-cu118-x86_64-linux/activation/__init__.py above; the rendered change is identical for this build variant.
build/torch24-cxx98-cu124-x86_64-linux/activation/_activation_0_0_1.abi3.so
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:9aebc81d7832f6125bb64a3cc16ceef16fcb02dd5c129d45d16c65526e297dd7
 size 2253864
build/torch25-cxx11-cu118-x86_64-linux/activation/__init__.py
CHANGED
Same diff as build/torch24-cxx11-cu118-x86_64-linux/activation/__init__.py above; the rendered change is identical for this build variant.
build/torch25-cxx11-cu118-x86_64-linux/activation/_activation_0_0_1.abi3.so
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:3c8dec0f48ddf3b692b0f2d5d44615cd28c03d71240b96d30632932baef967d9
+size 2393088
build/torch25-cxx11-cu121-x86_64-linux/activation/__init__.py
CHANGED
Same diff as build/torch24-cxx11-cu118-x86_64-linux/activation/__init__.py above; the rendered change is identical for this build variant.
build/torch25-cxx11-cu121-x86_64-linux/activation/_activation_0_0_1.abi3.so
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:a0415c28343a1e9eb0dfa459ee62825444b9c87378dd6d682d92d38e7445b3b7
 size 2393472
build/torch25-cxx11-cu124-x86_64-linux/activation/__init__.py
CHANGED
Same diff as build/torch24-cxx11-cu118-x86_64-linux/activation/__init__.py above; the rendered change is identical for this build variant.
build/torch25-cxx11-cu124-x86_64-linux/activation/_activation_0_0_1.abi3.so
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:79dccf3c5f06ab07eb34b91c5f3dce260ee42514b4c01a8f09b3e4f7707ec026
 size 2277656
build/torch25-cxx98-cu118-x86_64-linux/activation/__init__.py
CHANGED
Same diff as build/torch24-cxx11-cu118-x86_64-linux/activation/__init__.py above; the rendered change is identical for this build variant.
build/torch25-cxx98-cu118-x86_64-linux/activation/_activation_0_0_1.abi3.so
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:12be474aa43aa5ba609fae1e3c0c7695178f9addfd931096fda6080a16e4f810
+size 2377704
build/torch25-cxx98-cu121-x86_64-linux/activation/__init__.py
CHANGED
Same diff as build/torch24-cxx11-cu118-x86_64-linux/activation/__init__.py above; the rendered change is identical for this build variant.
build/torch25-cxx98-cu121-x86_64-linux/activation/_activation_0_0_1.abi3.so
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:2686f1c659110f1f6ad6ef32d73c0bb4fc9563b7975f320742b597c4938fe6f3
 size 2373608
build/torch25-cxx98-cu124-x86_64-linux/activation/__init__.py
CHANGED
Same diff as build/torch24-cxx11-cu118-x86_64-linux/activation/__init__.py above; the rendered change is identical for this build variant.
build/torch25-cxx98-cu124-x86_64-linux/activation/_activation_0_0_1.abi3.so
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:c4b5dd766087d45ed5c570af1c2fdfcf8aebbba88a7b8d77d13e1aa1875d9e90
 size 2253864