# Provenance (Hugging Face Hub page metadata, preserved as a comment):
# author: danieldk (HF Staff) — commit "Add layers" (e21f2b2), 1.02 kB
import torch
from ._ops import ops
from . import layers
def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
    """Apply SiLU-and-multiply to ``x``, writing the result into ``out``.

    Delegates to the compiled kernel ``ops.silu_and_mul``, which fills
    ``out`` in place.

    Args:
        out: Pre-allocated output tensor, written in place.
        x: Input tensor.

    Returns:
        ``out``, for call-chaining convenience.
    """
    # NOTE: the original annotation said ``-> None`` but the function
    # returns ``out``; the annotation is corrected to match behavior.
    ops.silu_and_mul(out, x)
    return out
def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
    """Apply GELU-and-multiply to ``x``, writing the result into ``out``.

    Delegates to the compiled kernel ``ops.gelu_and_mul``, which fills
    ``out`` in place.

    Args:
        out: Pre-allocated output tensor, written in place.
        x: Input tensor.

    Returns:
        ``out``, for call-chaining convenience.
    """
    # Annotation fixed: the function returns ``out``, not ``None``.
    ops.gelu_and_mul(out, x)
    return out
def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
    """Apply tanh-approximated GELU-and-multiply to ``x`` into ``out``.

    Delegates to the compiled kernel ``ops.gelu_tanh_and_mul``, which
    fills ``out`` in place.

    Args:
        out: Pre-allocated output tensor, written in place.
        x: Input tensor.

    Returns:
        ``out``, for call-chaining convenience.
    """
    # Annotation fixed: the function returns ``out``, not ``None``.
    ops.gelu_tanh_and_mul(out, x)
    return out
def fatrelu_and_mul(out: torch.Tensor, x: torch.Tensor, threshold: float = 0.0) -> torch.Tensor:
    """Apply FATReLU-and-multiply to ``x``, writing the result into ``out``.

    Delegates to the compiled kernel ``ops.fatrelu_and_mul``, which
    fills ``out`` in place.

    Args:
        out: Pre-allocated output tensor, written in place.
        x: Input tensor.
        threshold: Activation threshold forwarded to the kernel
            (default ``0.0``).

    Returns:
        ``out``, for call-chaining convenience.
    """
    # Annotation fixed: the function returns ``out``, not ``None``.
    ops.fatrelu_and_mul(out, x, threshold)
    return out
def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
    """Apply the fast-GELU kernel to ``x``, writing the result into ``out``.

    Delegates to the compiled kernel ``ops.gelu_fast``, which fills
    ``out`` in place.

    Args:
        out: Pre-allocated output tensor, written in place.
        x: Input tensor.

    Returns:
        ``out``, for call-chaining convenience.
    """
    # Annotation fixed: the function returns ``out``, not ``None``.
    ops.gelu_fast(out, x)
    return out
def gelu_new(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
    """Apply the "new" GELU kernel to ``x``, writing the result into ``out``.

    Delegates to the compiled kernel ``ops.gelu_new``, which fills
    ``out`` in place.

    Args:
        out: Pre-allocated output tensor, written in place.
        x: Input tensor.

    Returns:
        ``out``, for call-chaining convenience.
    """
    # Annotation fixed: the function returns ``out``, not ``None``.
    ops.gelu_new(out, x)
    return out
def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
    """Apply the quick-GELU kernel to ``x``, writing the result into ``out``.

    Delegates to the compiled kernel ``ops.gelu_quick``, which fills
    ``out`` in place.

    Args:
        out: Pre-allocated output tensor, written in place.
        x: Input tensor.

    Returns:
        ``out``, for call-chaining convenience.
    """
    # Annotation fixed: the function returns ``out``, not ``None``.
    ops.gelu_quick(out, x)
    return out
# Public API of this module: the activation wrapper functions plus the
# ``layers`` submodule re-exported above.
__all__ = [
    "silu_and_mul",
    "gelu_and_mul",
    "gelu_tanh_and_mul",
    "fatrelu_and_mul",
    "gelu_fast",
    "gelu_new",
    "gelu_quick",
    "layers",
]