leaky relu as geohot suggested (#167)

NeuralLink 2020-12-09 16:28:35 +05:30 committed by GitHub
parent c225e62dd2
commit 00e376f36c
2 changed files with 5 additions and 0 deletions

test/test_ops.py

@@ -53,6 +53,8 @@ class TestOps(unittest.TestCase):
     helper_test_op([(45,65)], lambda x: x.sqrt(), Tensor.sqrt, gpu=self.gpu)
   def test_relu(self):
     helper_test_op([(45,65)], lambda x: x.relu(), Tensor.relu, gpu=self.gpu)
+  def test_leakyrelu(self):
+    helper_test_op([(45,65)], lambda x: torch.nn.functional.leaky_relu(x,0.01), Tensor.leakyrelu, gpu=self.gpu)
   def test_sigmoid(self):
     helper_test_op([(45,65)], lambda x: x.sigmoid(), Tensor.sigmoid, gpu=self.gpu)
   def test_dot(self):

tinygrad/tensor.py

@@ -200,6 +200,9 @@ class Tensor:
   def tanh(self):
     return 2.0 * ((2.0 * self).sigmoid()) - 1.0
 
+  def leakyrelu(self, neg_slope=0.01):
+    return self.relu() - (-neg_slope*self).relu()
+
 # An instantiation of the Function is the Context
 class Function:
   def __init__(self, *tensors):
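The one-liner works because leaky ReLU can be expressed purely in terms of relu: relu(x) - relu(-neg_slope*x) equals x for positive x and neg_slope*x for negative x, so no new autograd op is needed and the backward pass comes for free from the existing relu. A quick standalone numpy check of that identity (illustrative, not part of the commit):

```python
import numpy as np

def relu(x):
  return np.maximum(x, 0)

def leakyrelu_via_relu(x, neg_slope=0.01):
  # Same composition as the commit: relu(x) - relu(-neg_slope * x).
  return relu(x) - relu(-neg_slope * x)

x = np.linspace(-3, 3, 13)
reference = np.where(x > 0, x, 0.01 * x)  # textbook piecewise definition
assert np.allclose(leakyrelu_via_relu(x), reference)
```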