removing docs since interface is now the same as pytorch, LOC < 50 ;)

0xNaN 2020-10-22 00:56:43 +02:00
parent f6cad1d7d8
commit ac4ef3a588
1 changed file with 0 additions and 36 deletions

@@ -2,16 +2,6 @@ import numpy as np
from tinygrad.tensor import Tensor
def jacobian(func, input):
"""
Compute the (analytical) Jacobian of func w.r.t. input.
func : A tinygrad func
input : An input
returns:
J : Jacobian
"""
output = func(input)
ji = input.data.reshape(-1).shape[-1]
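The docstring removed in this hunk describes jacobian as computing the analytical Jacobian of func with respect to input; the implementation itself is not shown here. As a plain-NumPy illustration of what an analytical Jacobian is (not the tinygrad code), take a function with a closed-form derivative such as y = tanh(W @ x): the chain rule gives the full Jacobian directly, with rows indexing outputs and columns indexing inputs. The names W and x below are hypothetical and only for this sketch.

import numpy as np

# Illustrative sketch only: analytical Jacobian of y = tanh(W @ x) via the chain rule.
# J[o, i] = dy_o / dx_i = (1 - tanh(W @ x)[o]**2) * W[o, i]
rng = np.random.default_rng(0)
W = rng.standard_normal((3, 4))
x = rng.standard_normal(4)

y = np.tanh(W @ x)               # outputs, shape (3,)
J = (1.0 - y ** 2)[:, None] * W  # Jacobian, shape (3, 4): rows are outputs, columns are inputs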
@@ -34,20 +24,6 @@ def mask_like(like, mask_inx, mask_value = 1.0):
return mask.reshape(like.shape)
def numerical_jacobian(func, input, eps = 1e-6):
"""
Compute the Jacobian through Finite-Difference Approximation.
Somewhat inspired by [1] but not followed closely.
func : A tinygrad func
input : An input
eps : Perturbation step
returns:
NJ : an approx. of the Jacobian
[1]: https://timvieira.github.io/blog/post/2017/04/21/how-to-test-gradient-implementations/
"""
output = func(input)
ji = input.data.reshape(-1).shape[-1]
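The removed docstring above documents numerical_jacobian as a finite-difference approximation of the Jacobian, loosely inspired by [1]. A standard variant of that recipe is the central difference: perturb each input coordinate by plus and minus eps and let the scaled output difference fill one column of the Jacobian. Whether this file uses a forward or central difference is not visible in the hunk, so the helper below (numerical_jacobian_np, assuming a 1-D input) is only a hypothetical plain-NumPy sketch of the technique.

import numpy as np

def numerical_jacobian_np(f, x, eps=1e-6):
    # Central-difference approximation, assuming a 1-D input:
    # NJ[:, i] = (f(x + eps*e_i) - f(x - eps*e_i)) / (2 * eps)
    x = np.asarray(x, dtype=np.float64)
    NJ = np.zeros((f(x).size, x.size))
    for i in range(x.size):
        dx = np.zeros_like(x)
        dx[i] = eps
        NJ[:, i] = (f(x + dx) - f(x - dx)).ravel() / (2 * eps)
    return NJ

rng = np.random.default_rng(0)
W = rng.standard_normal((3, 4))
NJ = numerical_jacobian_np(lambda v: np.tanh(W @ v), rng.standard_normal(4))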
@@ -67,18 +43,6 @@ def numerical_jacobian(func, input, eps = 1e-6):
return NJ
def gradcheck(func, input, eps = 1e-06, atol = 1e-5, rtol = 0.001):
"""
Checks whether the numerical approx. of the Jacobian of func w.r.t. input is close to the
analytical one.
func : A tinygrad func
input : An input
eps : Perturbation step
atol, rtol: Params for the numpy.allclose test
returns:
test_passed : Bool, whether the test passed
"""
NJ = numerical_jacobian(func, input, eps)
J = jacobian(func, input)
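Per the removed docstring, gradcheck boils down to comparing the two Jacobians with numpy.allclose under the given atol and rtol and returning a bool. A self-contained NumPy illustration of that comparison, again using the hypothetical tanh(W @ x) example rather than the tinygrad Tensor API:

import numpy as np

# Illustrative sketch only: the essence of a gradcheck is np.allclose(J, NJ, atol=atol, rtol=rtol).
rng = np.random.default_rng(0)
W = rng.standard_normal((3, 4))
x = rng.standard_normal(4)
f = lambda v: np.tanh(W @ v)

y = f(x)
J = (1.0 - y ** 2)[:, None] * W                     # analytical Jacobian via the chain rule

eps = 1e-6
NJ = np.zeros_like(J)
for i in range(x.size):
    dx = np.zeros_like(x)
    dx[i] = eps
    NJ[:, i] = (f(x + dx) - f(x - dx)) / (2 * eps)  # finite-difference column

print(np.allclose(J, NJ, atol=1e-5, rtol=1e-3))     # True when the two Jacobians agree

PyTorch's torch.autograd.gradcheck takes the same kind of parameters (func, inputs, eps, atol, rtol) and returns a bool, which is presumably what the commit message above means by the interface now being the same as pytorch.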