#!/usr/bin/env python
import unittest
from tinygrad.tensor import Tensor
from tinygrad.ops import MetaOps, BufferOps
from tinygrad.nn import Conv2d
from tinygrad.engine.schedule import create_schedule
from tinygrad.shape.shapetracker import ShapeTracker, View
from tinygrad.helpers import prod
from test.unit.test_shapetracker import shapetracker_getitem
class TestConvShapetracker(unittest.TestCase):
  """Checks that scheduling convolutions produces ShapeTrackers with as few views as possible."""

  def test_conv_3x3_one_view(self):
    # A forward 3x3 conv should lower to exactly one kernel, and every buffer
    # load inside that kernel must be expressible with a single View.
    conv = Conv2d(16, 32, (3, 3))
    seen = set()

    # first run to init the weights, they are saved in seen
    create_schedule([conv(Tensor.empty(1, 16, 10, 10)).lazydata], seen)
    # run it again to get the kernels
    sched = [si for si in create_schedule([conv(Tensor.empty(1, 16, 10, 10)).lazydata], seen) if si.ast.op is MetaOps.KERNEL]
    assert len(sched) == 1, f"conv should only have one kernel, getting {len(sched)}"
    # inspect the ShapeTracker of every LOAD in the single conv kernel
    for st in [x.arg.st for x in sched[0].ast.parents if x.op is BufferOps.LOAD]:
      assert len(st.views) == 1

  def test_conv_2x2_backward_one_view(self):
    # Backward pass of a tiny 2x2 conv. The gradient's input LOAD is expected
    # to simplify to at most two views, but currently does not — the
    # assertRaises at the bottom documents this as a known failure.
    X = Tensor.rand(1, 1, 3, 3, requires_grad=True)
    conv = Conv2d(1, 1, (2, 2), bias=False)
    conv(X).mean().backward()
    # last schedule item computes the gradient of X
    si = X.grad.schedule()[-1]
    print(si)
    # grab the first LOAD in the gradient kernel and simplify its ShapeTracker
    ldb = [x for x in si.ast.parents if x.op is BufferOps.LOAD][0]
    st: ShapeTracker = ldb.arg.st.simplify()
    # NOTE: st.real_size() is broken
    print(si.inputs[0].size)
    #self.assertEqual(si.inputs[0].size, st.real_size())
    for v in st.views: print(v)

    # same st — a hand-written ShapeTracker that should index identically
    test_st = ShapeTracker((
      View(shape=(1, 1, 2, 4, 2, 4), strides=(0, 0, 2, 8, 1, 4), offset=0, mask=((0, 1), (0, 1), (0, 2), (0, 2), (0, 2), (0, 2)), contiguous=False),
      View(shape=(1, 1, 1, 1, 3, 3, 3, 3), strides=(0, 0, 0, 0, 24, 8, 3, 1), offset=0,
        mask=((0, 1), (0, 1), (0, 1), (0, 1), (0, 2), (0, 3), (0, 2), (0, 3)), contiguous=False)))
    #test_st = ShapeTracker((
    #  View(shape=(2,4), strides=(1,4), offset=0, mask=None, contiguous=False),
    #)).simplify()
    #View(shape=(1, 1, 2, 4, 2, 4), strides=(0, 0, 2, 8, 1, 4), offset=0, mask=((0, 1), (0, 1), (0, 2), (0, 2), (0, 2), (0, 2)), contiguous=False),
    #View(shape=(1, 1, 1, 1, 3, 3, 3, 3), strides=(0, 0, 0, 0, 24, 8, 3, 1), offset=0,
    #  mask=((0, 1), (0, 1), (0, 1), (0, 1), (0, 2), (0, 3), (0, 2), (0, 3)), contiguous=False))).simplify()
    print("*** new ***")
    for v in test_st.views: print(v)
    # compare element-by-element indexing of the real st vs the hand-written one
    # (comparison is printed only; the strict equality check is still disabled)
    for i in range(prod(st.shape)):
      i1, i2 = shapetracker_getitem(st, i), shapetracker_getitem(test_st, i)
      print(i, i1, i2, si.inputs[0].size, i1==i2)
      #self.assertEqual(i1, i2)

    # dump the symbolic index/valid expressions for both trackers
    for stt in [st, test_st]:
      s,va = stt.expr_idxs()
      print(s)
      print(va)
    # the simplified backward st still has more than two views; presumably
    # remove the assertRaises wrapper once the simplifier handles this case
    with self.assertRaises(AssertionError):
      assert len(st.views) <= 2
# Run the tests when this file is executed directly.
if __name__ == "__main__":
  unittest.main()