Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
39 changes: 39 additions & 0 deletions test_tinytorch.py
Original file line number Diff line number Diff line change
Expand Up @@ -568,3 +568,42 @@ def attention(k, q, v, mask):
assert np.allclose(
vt.grad.numpy(), vtt.grad.data, atol=1e-5
), "Gradients do not match between PyTorch and tinytorch."


def test_xor_matmul_backward():
    """Smoke-test backpropagation through matmul on the classic XOR task.

    Builds a tiny 2-2-1 tanh network, accumulates MSE loss over the four
    XOR input/target rows for two passes, then runs ``backward()`` once.
    There are no value assertions: the test passes if no shape or
    broadcasting error is raised while gradients flow through the
    Linear layers' matrix multiplications (vector-times-matrix cases).
    """

    class XorNet(tinytorch.Module):
        # Minimal 2-layer tanh MLP, just enough to exercise Linear/matmul.
        def __init__(self):
            super().__init__()
            self.l1 = tinytorch.Linear(2, 2)
            self.l2 = tinytorch.Linear(2, 1)

        def forward(self, x):
            x = self.l1(x)
            x = tinytorch.tanh(x)
            x = self.l2(x)
            x = tinytorch.tanh(x)
            return x

    # The four XOR input rows and their matching targets.
    x = tinytorch.tensor(
        [
            [0, 0],
            [1, 0],
            [0, 1],
            [1, 1],
        ]
    )
    y = tinytorch.tensor(
        [
            [0],
            [1],
            [1],
            [0],
        ]
    )
    model = XorNet()
    loss = tinytorch.Tensor([0.0])
    # Two passes over the data so the accumulated graph traverses the
    # matmul backward paths repeatedly before the single backward() call.
    for _ in range(2):
        for x1, y1 in zip(x, y):
            pred = model(x1)
            loss += tinytorch.mse_loss(pred, y1)
    loss.backward()
26 changes: 24 additions & 2 deletions tinytorch.py
Original file line number Diff line number Diff line change
Expand Up @@ -375,8 +375,30 @@ def transpose_last_axis(x: np.ndarray):
axes[dim1], axes[dim2] = dim2, dim1
return x.transpose(axes)

grad_x = np.matmul(grad.data, transpose_last_axis(y.data))
grad_y = transpose_last_axis(x.data) @ grad.data
if len(x.shape) == 1 and len(y.shape) == 1:
# vector * vector
grad_x = grad.data * y.data
grad_y = x.data * grad.data
elif len(x.shape) == 1:
# vector * matrix
grad_x = grad.data @ y.data.T
grad_y = np.outer(x.data, grad.data)
elif len(y.shape) == 1:
# matrix * vector
dim_diff = len(y.shape) - len(x.shape)
axis_to_sum = tuple(range(dim_diff))
grad_x = np.outer(grad.data, y).sum(axis=axis_to_sum)
grad_y = grad.data.T @ x.data
else:
# matrix * matrix
dim_diff = len(y.shape) - len(x.shape)
axis_to_sum = tuple(range(dim_diff))
print(axis_to_sum)
grad_x = (grad.data @ transpose_last_axis(y.data)).sum(axis=axis_to_sum)
dim_diff_y = len(x.shape) - len(y.shape)
axis_to_sum_y = tuple(range(dim_diff_y))
grad_y = (transpose_last_axis(x.data) @ grad.data).sum(axis=axis_to_sum_y)

return Tensor(grad_x), Tensor(grad_y)


Expand Down