From 00e376f36c9e2d20a49b7a051cd27318bd5a2792 Mon Sep 17 00:00:00 2001
From: NeuralLink <38289341+kartik4949@users.noreply.github.com>
Date: Wed, 9 Dec 2020 16:28:35 +0530
Subject: [PATCH] leaky relu as geohot suggested (#167)

---
 test/test_ops.py   | 2 ++
 tinygrad/tensor.py | 3 +++
 2 files changed, 5 insertions(+)

diff --git a/test/test_ops.py b/test/test_ops.py
index 804dd61a..c4fcb251 100644
--- a/test/test_ops.py
+++ b/test/test_ops.py
@@ -53,6 +53,8 @@ class TestOps(unittest.TestCase):
     helper_test_op([(45,65)], lambda x: x.sqrt(), Tensor.sqrt, gpu=self.gpu)
   def test_relu(self):
     helper_test_op([(45,65)], lambda x: x.relu(), Tensor.relu, gpu=self.gpu)
+  def test_leakyrelu(self):
+    helper_test_op([(45,65)], lambda x: torch.nn.functional.leaky_relu(x,0.01), Tensor.leakyrelu, gpu=self.gpu)
   def test_sigmoid(self):
     helper_test_op([(45,65)], lambda x: x.sigmoid(), Tensor.sigmoid, gpu=self.gpu)
   def test_dot(self):
diff --git a/tinygrad/tensor.py b/tinygrad/tensor.py
index 4c33341b..f14f581c 100644
--- a/tinygrad/tensor.py
+++ b/tinygrad/tensor.py
@@ -200,6 +200,9 @@ class Tensor:
   def tanh(self):
     return 2.0 * ((2.0 * self).sigmoid()) - 1.0
 
+  def leakyrelu(self, neg_slope=0.01):
+    return self.relu() - (-neg_slope*self).relu()
+
 # An instantiation of the Function is the Context
 class Function:
   def __init__(self, *tensors):
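
Note on the composition the patch relies on: for neg_slope > 0,
leakyrelu(x) = relu(x) - relu(-neg_slope * x). For x >= 0 the second term
vanishes, leaving x; for x < 0 the first term vanishes, leaving
-(-neg_slope * x) = neg_slope * x, which matches the piecewise definition
used by torch.nn.functional.leaky_relu. A minimal standalone numpy sketch
of that identity (illustrative only, not part of the patch):

import numpy as np

def relu(x):
  return np.maximum(x, 0)

def leakyrelu(x, neg_slope=0.01):
  # Same ReLU composition as the patch: relu(x) - relu(-neg_slope * x).
  return relu(x) - relu(-neg_slope * x)

x = np.random.randn(45, 65).astype(np.float32)
# Piecewise reference definition: x where x > 0, else neg_slope * x.
reference = np.where(x > 0, x, 0.01 * x)
assert np.allclose(leakyrelu(x), reference)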