import torch

from ._ops import ops
from . import layers


def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
    """SwiGLU: writes SiLU(x[..., :d]) * x[..., d:] into out, for x of shape (..., 2 * d)."""
    ops.silu_and_mul(out, x)
    return out


def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
    """GeGLU with erf-based GELU: writes GELU(x[..., :d]) * x[..., d:] into out."""
    ops.gelu_and_mul(out, x)
    return out


def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
    """GeGLU with the tanh GELU approximation: writes GELU_tanh(x[..., :d]) * x[..., d:] into out."""
    ops.gelu_tanh_and_mul(out, x)
    return out


def fatrelu_and_mul(out: torch.Tensor, x: torch.Tensor, threshold: float = 0.0) -> torch.Tensor:
    """FATReLU gating: thresholded ReLU on x[..., :d], multiplied by x[..., d:], written into out."""
    ops.fatrelu_and_mul(out, x, threshold)
    return out

def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
    """Fast tanh-based GELU approximation, applied elementwise to x and written into out."""
    ops.gelu_fast(out, x)
    return out


def gelu_new(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
    """The "new" GELU tanh approximation (as used in GPT-2), applied elementwise into out."""
    ops.gelu_new(out, x)
    return out


def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
    """QuickGELU, x * sigmoid(1.702 * x), applied elementwise and written into out."""
    ops.gelu_quick(out, x)
    return out

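# The elementwise GELU variants above take out with the same shape as x, e.g.
# gelu_quick(torch.empty_like(x), x). A minimal pure-PyTorch reference for
# gelu_quick (a sketch of the usual QuickGELU formula, not part of this
# module), handy as a sanity check against the kernel:
#
#     def gelu_quick_ref(x: torch.Tensor) -> torch.Tensor:
#         return x * torch.sigmoid(1.702 * x)
#
#     torch.testing.assert_close(gelu_quick_ref(x), gelu_quick(torch.empty_like(x), x))
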
__all__ = [
    "silu_and_mul",
    "gelu_and_mul",
    "gelu_tanh_and_mul",
    "fatrelu_and_mul",
    "gelu_fast",
    "gelu_new",
    "gelu_quick",
    "layers",
]