Add dropout test (#265)

* Add dropout test

* Remove condition where training is false

* Skip dropout test when on GPU

* Revert changes to tensor.py and fix test case

* Revert change on whitespace

* Convert Tensor to cpu for testing

* Fix whitespace in tensor.py
This commit is contained in:
Jacky Lee 2021-06-19 08:49:13 -07:00 committed by GitHub
parent ca0a38f2d5
commit 3a91d5434f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 8 additions and 1 deletions

View File

@ -64,6 +64,14 @@ class TestTinygrad(unittest.TestCase):
for x,y in zip(test_tinygrad(), test_pytorch()): for x,y in zip(test_tinygrad(), test_pytorch()):
np.testing.assert_allclose(x, y, atol=1e-5) np.testing.assert_allclose(x, y, atol=1e-5)
def test_dropout(self):
  """Dropout(p) in training mode should zero roughly a p-fraction of elements.

  Statistical check: with n = 1e6 ones and rate 0.1, the count of surviving
  (non-zero) elements should be close to n * (1 - rate); rtol=1e-3 absorbs
  the binomial sampling noise.
  """
  # Tensor.training is class-level (global) state; save and restore it so
  # this test cannot leak training mode into tests that run after it.
  prev_training = Tensor.training
  Tensor.training = True
  try:
    n, rate = 1_000_000, 0.1
    w = Tensor.ones(n).dropout(rate)
    # .cpu() makes the buffer host-readable regardless of the default device
    non_zeros = np.count_nonzero(w.cpu().data)
    expected = n * (1 - rate)
    np.testing.assert_allclose(non_zeros, expected, rtol=1e-3)
  finally:
    Tensor.training = prev_training
@unittest.skipUnless(not DEFAULT_DEVICE, "float64 not supported on GPU") @unittest.skipUnless(not DEFAULT_DEVICE, "float64 not supported on GPU")
def test_jacobian(self): def test_jacobian(self):
W = np.random.RandomState(1337).random((10, 5)) W = np.random.RandomState(1337).random((10, 5))

View File

@ -267,7 +267,6 @@ class Tensor:
return self - ss return self - ss
def dropout(self, p=0.5): def dropout(self, p=0.5):
# TODO: this needs a test
if Tensor.training: if Tensor.training:
_mask = np.asarray(np.random.binomial(1, 1.0-p, size=self.shape), dtype=self.dtype) _mask = np.asarray(np.random.binomial(1, 1.0-p, size=self.shape), dtype=self.dtype)
return self * Tensor(_mask, requires_grad=False, device=self.device) * (1/(1.0 - p)) return self * Tensor(_mask, requires_grad=False, device=self.device) * (1/(1.0 - p))