2020-12-14 12:45:55 +08:00
|
|
|
import numpy as np
|
|
|
|
from tqdm import trange
|
2023-11-04 06:20:44 +08:00
|
|
|
from tinygrad.tensor import Tensor
|
|
|
|
from tinygrad.helpers import CI
|
2023-11-15 13:44:25 +08:00
|
|
|
from tinygrad.jit import TinyJit
|
|
|
|
|
2020-12-14 12:45:55 +08:00
|
|
|
|
2023-08-22 05:14:54 +08:00
|
|
|
def train(model, X_train, Y_train, optim, steps, BS=128, lossfn=lambda out,y: out.sparse_categorical_crossentropy(y),
          transform=lambda x: x, target_transform=lambda x: x, noloss=False):
  """Train `model` on (X_train, Y_train) for `steps` iterations of randomly sampled minibatches.

  Args:
    model: object with a `forward(x)` method, or a plain callable taking a Tensor.
    X_train, Y_train: training inputs/labels; indexed with numpy fancy indexing,
      so they are assumed to be numpy arrays (TODO confirm against callers).
    optim: optimizer exposing `zero_grad()` and `step()`.
    steps: number of minibatch updates to run.
    BS: minibatch size (samples drawn with replacement via np.random.randint).
    lossfn: maps (network output, labels) to a scalar loss Tensor.
    transform / target_transform: applied to the sampled numpy batch before
      wrapping it in a Tensor.
    noloss: when True, skip loss/accuracy bookkeeping entirely (the JIT'd step
      returns (None, None)); useful for pure-throughput benchmarking.

  Returns:
    [losses, accuracies] — per-step numpy scalars; both lists stay empty when
    noloss is True.
  """
  # JIT-compile the whole forward/backward/step so repeated calls with the
  # same-shaped inputs replay the captured kernels instead of re-tracing.
  @TinyJit
  def train_step(x, y):
    # network
    out = model.forward(x) if hasattr(model, 'forward') else model(x)
    loss = lossfn(out, y)
    optim.zero_grad()
    loss.backward()
    # drop the loss graph before the optimizer step when we won't report it
    if noloss: del loss
    optim.step()
    if noloss: return (None, None)
    cat = out.argmax(axis=-1)
    accuracy = (cat == y).mean()
    # realize() forces evaluation so the JIT captures concrete output buffers
    return loss.realize(), accuracy.realize()

  # enable training mode (dropout, batchnorm stats) for the duration of the loop
  with Tensor.train():
    losses, accuracies = [], []
    for i in (t := trange(steps, disable=CI)):
      # sample a random minibatch (with replacement)
      samp = np.random.randint(0, X_train.shape[0], size=(BS))
      x = Tensor(transform(X_train[samp]), requires_grad=False)
      y = Tensor(target_transform(Y_train[samp]))
      loss, accuracy = train_step(x, y)
      # printing
      if not noloss:
        loss, accuracy = loss.numpy(), accuracy.numpy()
        losses.append(loss)
        accuracies.append(accuracy)
        t.set_description("loss %.2f accuracy %.2f" % (loss, accuracy))
  return [losses, accuracies]
|
2020-12-14 12:45:55 +08:00
|
|
|
|
2023-07-22 01:37:55 +08:00
|
|
|
|
|
|
|
def evaluate(model, X_test, Y_test, num_classes=None, BS=128, return_predict=False, transform=lambda x: x,
             target_transform=lambda y: y):
  """Measure classification accuracy of `model` on the test set.

  Runs inference over X_test in batches of BS, takes the argmax over the last
  axis of the network output, and compares against target_transform(Y_test).

  Args:
    model: object with a `forward(x)` method, or a plain callable taking a Tensor.
    X_test, Y_test: test inputs/labels; sliced with numpy-style indexing, so
      assumed to be numpy arrays (TODO confirm against callers).
    num_classes: size of the output distribution; inferred as max(Y_test)+1
      when not given.
    BS: inference batch size.
    return_predict: when True, also return the per-sample argmax predictions.
    transform / target_transform: applied to inputs / labels before use.

  Returns:
    accuracy, or (accuracy, predictions) when return_predict is True.

  NOTE(review): flips the global Tensor.training flag off and never restores it.
  """
  Tensor.training = False

  def _collect_logits(labels, n_classes):
    # Accumulate the raw network outputs for every test sample, batch by batch.
    logits = np.zeros(list(labels.shape) + [n_classes])
    n_batches = (len(labels) - 1) // BS + 1
    for b in trange(n_batches, disable=CI):
      lo, hi = b * BS, (b + 1) * BS
      xb = Tensor(transform(X_test[lo:hi]))
      preds = model.forward(xb) if hasattr(model, 'forward') else model(xb)
      logits[lo:hi] = preds.numpy()
    return logits

  # infer the class count from the labels when the caller didn't supply it
  if num_classes is None: num_classes = Y_test.max().astype(int)+1
  Y_test_pred = np.argmax(_collect_logits(Y_test, num_classes), axis=-1)
  acc = (target_transform(Y_test) == Y_test_pred).mean()
  print("test set accuracy is %f" % acc)
  return (acc, Y_test_pred) if return_predict else acc
|
2020-12-29 11:45:46 +08:00
|
|
|
|