import pytest

from micrograd.engine import Value


def _build_neuron_graph():
    """Build the canonical two-input neuron: y = tanh(x1*w1 + x2*w2 + b).

    Returns every node as a tuple (x1, x2, w1, w2, b, h1, h2, h, n, y)
    so tests can inspect any intermediate gradient.
    """
    # inputs
    x1 = Value(2.0, label='x1')
    x2 = Value(0.0, label='x2')
    # weights
    w1 = Value(-3.0, label='w1')
    w2 = Value(1.0, label='w2')
    # bias chosen so n = x1*w1 + x2*w2 + b = 0.8814..., where
    # tanh(n) ~ 0.7071 and the local derivative 1 - tanh(n)**2 ~ 0.5
    b = Value(6.8813735870195432, label='b')
    h1 = x1 * w1
    h1.label = 'h1'
    h2 = x2 * w2
    h2.label = 'h2'
    h = h1 + h2
    h.label = 'h'
    n = h + b
    n.label = 'n'
    y = n.tanh()
    y.label = 'y'
    return x1, x2, w1, w2, b, h1, h2, h, n, y


def test_backward_tanh():
    """A lone tanh node propagates grad = 1 - tanh(x)**2 to its input."""
    x = Value(0.8814)
    y = x.tanh()
    y.grad = 1.0
    y._backward()
    # tanh(0.8814) ~ 0.7071, so the derivative is 1 - 0.7071**2 ~ 0.5
    assert x.grad == pytest.approx(0.5, rel=0.1)


def test_large_backprop():
    """Chain _backward() by hand through the graph, checking each step."""
    x1, x2, w1, w2, b, h1, h2, h, n, y = _build_neuron_graph()

    y.grad = 1.0
    y._backward()
    assert n.grad == pytest.approx(0.5, rel=0.001)
    # h's grad must remain untouched until n._backward() runs
    assert h.grad == 0.0

    n._backward()
    # addition routes the upstream grad unchanged to both operands
    assert b.grad == pytest.approx(0.5, rel=0.001)
    assert h.grad == pytest.approx(0.5, rel=0.001)

    b._backward()  # leaf: no-op, kept to mirror the traversal order
    h._backward()
    assert h1.grad == pytest.approx(0.5, rel=0.001)
    assert h2.grad == pytest.approx(0.5, rel=0.001)

    h1._backward()
    h2._backward()
    # product rule: x.grad = other_factor.data * out.grad
    assert x1.grad == pytest.approx(-1.5, rel=0.001)  # -3.0 * 0.5
    assert w1.grad == pytest.approx(1.0, rel=0.001)   # 2.0 * 0.5
    assert x2.grad == pytest.approx(0.5, rel=0.001)   # 1.0 * 0.5
    # x2.data is 0.0, so w2's grad is exactly zero; compare with an
    # absolute tolerance (relative tolerance is meaningless at 0)
    assert w2.grad == pytest.approx(0.0, abs=1e-12)


@pytest.mark.skip(reason="non-deterministic")
def test_auto_diff():
    """Full automatic backprop via y.backward().

    Skipped: the current backward() traversal order is non-deterministic.
    """
    x1, x2, w1, w2, b, h1, h2, h, n, y = _build_neuron_graph()

    y.backward()
    # NOTE: the original version also asserted h.grad == 0.0 here — a
    # copy-paste leftover from test_large_backprop. After a full
    # backward() pass h.grad is 0.5, so that assertion was removed.
    assert n.grad == pytest.approx(0.5, rel=0.001)
    assert b.grad == pytest.approx(0.5, rel=0.001)
    assert h.grad == pytest.approx(0.5, rel=0.001)
    assert h1.grad == pytest.approx(0.5, rel=0.001)
    assert h2.grad == pytest.approx(0.5, rel=0.001)
    assert x1.grad == pytest.approx(-1.5, rel=0.001)
    assert w1.grad == pytest.approx(1.0, rel=0.001)
    assert x2.grad == pytest.approx(0.5, rel=0.001)
    assert w2.grad == pytest.approx(0.0, abs=1e-12)