Unary & Activations#
abs#
def abs(x: 'Tensor') -> 'Tensor':
Element-wise absolute value.
neg#
def neg(x: 'Tensor') -> 'Tensor':
Element-wise negation, -x.
exp#
def exp(x: 'Tensor') -> 'Tensor':
Element-wise natural exponential, e**x.
log#
def log(x: 'Tensor') -> 'Tensor':
Element-wise natural logarithm.
log1p#
def log1p(x: 'Tensor') -> 'Tensor':
Element-wise log(1 + x), accurate for x near zero.
sqrt#
def sqrt(x: 'Tensor') -> 'Tensor':
Element-wise square root.
rsqrt#
def rsqrt(x: 'Tensor') -> 'Tensor':
Element-wise reciprocal square root, 1 / sqrt(x).
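A quick usage sketch for the ops above. This is an illustration, not part of nabla's documented API: the `nb` import alias and the NumPy-style `nb.array` constructor are assumptions.

```python
import nabla as nb  # assumed import alias

# Assumption: nb.array builds a Tensor from Python data, NumPy-style.
x = nb.array([1e-10, 0.5, 4.0])

y = nb.exp(nb.log(x))  # round-trips back to x, up to float precision
s = nb.log1p(x)        # stable log(1 + x); avoids the precision loss of
                       # computing log of (1 + x) directly when x is tiny
r = nb.rsqrt(x)        # reciprocal square root, equal to 1 / sqrt(x)
```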
sin#
def sin(x: 'Tensor') -> 'Tensor':
Element-wise sine (radians).
cos#
def cos(x: 'Tensor') -> 'Tensor':
Element-wise cosine (radians).
acos#
def acos(x: 'Tensor') -> 'Tensor':
Element-wise arccosine; defined for inputs in [-1, 1].
atanh#
def atanh(x: 'Tensor') -> 'Tensor':
Element-wise inverse hyperbolic tangent; defined for inputs in (-1, 1).
erf#
def erf(x: 'Tensor') -> 'Tensor':
Element-wise Gauss error function.
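A sketch of the inverse and identity relationships among these functions, under the same assumed `nb.array` constructor (and assuming Tensor supports the `*` and `+` operators):

```python
import nabla as nb  # assumed import alias

x = nb.array([-0.9, 0.0, 0.9])  # assumed NumPy-style constructor

one = nb.sin(x) * nb.sin(x) + nb.cos(x) * nb.cos(x)  # ~1.0 everywhere
a = nb.acos(nb.cos(x))    # recovers |x| for x in [-pi, pi]
t = nb.atanh(nb.tanh(x))  # recovers x; atanh is defined on (-1, 1)
e = nb.erf(x)             # Gauss error function, values in (-1, 1)
```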
floor#
def floor(x: 'Tensor') -> 'Tensor':
Element-wise floor: rounds toward negative infinity.
round#
def round(x: 'Tensor') -> 'Tensor':
Element-wise rounding to the nearest integer.
trunc#
def trunc(x: 'Tensor') -> 'Tensor':
Element-wise truncation: rounds toward zero.
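The three rounding modes differ only in how they treat the fractional part; a small comparison, again with the assumed `nb.array` constructor:

```python
import nabla as nb  # assumed import alias

x = nb.array([-1.5, -0.5, 0.5, 1.5])  # assumed constructor

nb.floor(x)  # [-2., -1.,  0.,  1.]  always rounds toward -infinity
nb.trunc(x)  # [-1., -0.,  0.,  1.]  always rounds toward zero
nb.round(x)  # nearest integer; the tie-breaking rule at .5 (half-even vs.
             # half-away-from-zero) is not stated here, so check the backend
```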
cast#
def cast(x: 'Tensor', dtype: 'DType | None' = None) -> 'Tensor':
Casts the element values of x to the given dtype.
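A hedged sketch of `cast`; how nabla actually spells DType values is an assumption here:

```python
import nabla as nb  # assumed import alias

x = nb.array([0.0, 1.7, 2.9])  # assumed constructor

# Assumption: DType members are reachable as nb.DType.<name>; the real
# spelling depends on nabla's DType API.
xi = nb.cast(x, nb.DType.int32)    # float -> int casts typically truncate
xf = nb.cast(xi, nb.DType.float32)
```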
is_inf#
def is_inf(x: 'Tensor') -> 'Tensor':
Element-wise test for infinity; returns a boolean tensor.
is_nan#
def is_nan(x: 'Tensor') -> 'Tensor':
Element-wise test for NaN; returns a boolean tensor.
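Both predicates return boolean masks with the same shape as the input, which is useful for detecting or filtering non-finite values (assumed `nb.array` constructor):

```python
import nabla as nb  # assumed import alias

x = nb.array([1.0, float("inf"), float("nan")])  # assumed constructor

nb.is_inf(x)  # [False, True,  False]
nb.is_nan(x)  # [False, False, True ]
```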
relu#
def relu(x: 'Tensor') -> 'Tensor':
Rectified linear unit: element-wise max(x, 0).
sigmoid#
def sigmoid(x: 'Tensor') -> 'Tensor':
Logistic sigmoid: element-wise 1 / (1 + exp(-x)).
tanh#
def tanh(x: 'Tensor') -> 'Tensor':
Element-wise hyperbolic tangent.
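A sketch of the three classic activations on a small input (assumed `nb.array` constructor):

```python
import nabla as nb  # assumed import alias

x = nb.array([-2.0, 0.0, 2.0])  # assumed constructor

nb.relu(x)     # max(x, 0): [0., 0., 2.]
nb.sigmoid(x)  # 1 / (1 + exp(-x)); outputs in (0, 1)
nb.tanh(x)     # outputs in (-1, 1); identity: tanh(x) == 2*sigmoid(2x) - 1
```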
gelu#
def gelu(x: 'Tensor') -> 'Tensor':
Gaussian error linear unit activation.
silu#
def silu(x: 'Tensor') -> 'Tensor':
Sigmoid linear unit (swish): element-wise x * sigmoid(x).
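silu is x * sigmoid(x) (also known as swish), and gelu is x * Phi(x) with Phi the standard normal CDF; whether nabla's gelu uses the exact erf form or the common tanh approximation is not stated here. A sketch, assuming the `nb.array` constructor and a `*` operator on Tensor:

```python
import nabla as nb  # assumed import alias

x = nb.array([-1.0, 0.0, 1.0])  # assumed constructor

nb.silu(x)         # x * sigmoid(x)
x * nb.sigmoid(x)  # should match nb.silu(x) element-wise

nb.gelu(x)         # exact form is 0.5 * x * (1 + erf(x / sqrt(2)));
                   # some libraries use a tanh approximation instead
```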
softmax#
def softmax(x: 'Tensor', axis: 'int' = -1) -> 'Tensor':
Softmax along the given axis, implemented as a composition of existing nabla ops.
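Each slice along `axis` is exponentiated and normalized to sum to 1. A sketch (assumed `nb.array` constructor):

```python
import nabla as nb  # assumed import alias

logits = nb.array([[1.0, 2.0, 3.0],
                   [5.0, 5.0, 5.0]])  # assumed constructor

p = nb.softmax(logits, axis=-1)
# Every row of p is a probability distribution: nonnegative, sums to 1.
# The uniform second row maps to [1/3, 1/3, 1/3].
```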
logsoftmax#
def logsoftmax(x: 'Tensor', axis: 'int' = -1) -> 'Tensor':
Log-softmax along the given axis, implemented with sharding support.
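Mathematically this is log(softmax(x)), but a fused op is preferred because the naive composition can underflow to log(0) when logits are far apart. A sketch (assumed constructor); the stable formulation in the comment is the standard one, not necessarily nabla's exact code path:

```python
import nabla as nb  # assumed import alias

logits = nb.array([[100.0, 0.0, -100.0]])  # assumed constructor

lp = nb.logsoftmax(logits, axis=-1)
# Standard stable form: x - max(x) - log(sum(exp(x - max(x))))

naive = nb.log(nb.softmax(logits, axis=-1))
# The naive composition can fail: in float32, softmax of the -100 entry
# underflows to 0.0, and log(0.0) yields -inf.
```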