mirror of https://github.com/commaai/tinygrad.git
Add more onnx ops (#615)
* Add Celu
* Add thresholded relu
* Add softsign
This commit is contained in:
parent 643e8b0388
commit 1ffe8d68d5
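For reference, the ONNX spec defines the three ops added here as Softsign(x) = x / (1 + |x|), Celu(x) = max(0, x) + min(0, alpha * (exp(x / alpha) - 1)), and ThresholdedRelu(x) = x if x > alpha, else 0. Below is a minimal NumPy sketch of those reference formulas for comparison against the diff; the function names and the NumPy dependency are illustrative and not part of the commit.

import numpy as np

# Reference formulas from the ONNX operator spec (illustrative, not from the commit).
def softsign_ref(x): return x / (1 + np.abs(x))
def celu_ref(x, alpha=1.0): return np.maximum(0, x) + np.minimum(0, alpha * (np.exp(x / alpha) - 1))
def thresholded_relu_ref(x, alpha=1.0): return np.where(x > alpha, x, 0.0)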
@@ -104,16 +104,19 @@ def Neg(input): return -input
 def Reciprocal(input): return input.reciprocal()
 def Sqrt(input): return input.sqrt()
 def Sign(input): return input.sign()
+def Softsign(input): return input / (1+input.abs())
 def Abs(input): return input.abs()
 def Exp(input): return input.exp()
 def Log(input): return input.log()
 def Mish(input): return input.mish()
 def HardSigmoid(input, alpha=0.2, beta=0.5): return (alpha*input + beta).clip(0, 1)
 def HardSwish(input): return input * HardSigmoid(input, 1/6, 0.5)
+def Celu(X, alpha=1.0): return X.relu() - (-alpha*(X/alpha).exp()+alpha).relu()
 def Selu(X, alpha=1.67326319217681884765625, gamma=1.05070102214813232421875): return gamma * (X.relu() - (-alpha*X.exp()+alpha).relu())
 def Softplus(X): return X.softplus()
 def PRelu(X, slope): return X.leakyrelu(slope)
 def LeakyRelu(X, alpha=0.01): return X.leakyrelu(alpha)
+def ThresholdedRelu(X, alpha=1.0): return (X-alpha).relu() + (X-alpha).relu().sign() * alpha
 def Softmax(input, axis=-1): return input.softmax(axis)
 def LogSoftmax(input, axis=-1): return input.log_softmax(axis)
 def Clip(input, min=-3.4e38, max=3.4e38): return input.clip(min, max)
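A short usage sketch of the new ops on tinygrad Tensors follows; it assumes these helpers live in extra/onnx_ops.py alongside the functions shown above, which is an assumption about the file being patched rather than something visible in this excerpt.

from tinygrad.tensor import Tensor
from extra.onnx_ops import Softsign, Celu, ThresholdedRelu  # assumed module path

x = Tensor([[-2.0, -0.5, 0.0, 0.5, 2.0]])
print(Softsign(x).numpy())                    # x / (1 + |x|)
print(Celu(x, alpha=1.0).numpy())             # smooth ELU-style negative branch
print(ThresholdedRelu(x, alpha=1.0).numpy())  # zeroes values not above alpha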