import pytest

from micrograd.engine import Value


def _make_neuron():
    """Wire up the reference neuron graph n = x1*w1 + x2*w2 + b.

    Returns the nodes (x1, x2, w1, w2, b, h1, h2, h, n) so each test can
    attach its own output non-linearity and run/step the backward pass.
    """
    # inputs
    x1 = Value(2.0, label='x1')
    x2 = Value(0.0, label='x2')
    # weights
    w1 = Value(-3.0, label='w1')
    w2 = Value(1.0, label='w2')
    # bias (chosen so tanh(n) lands at a convenient gradient of 0.5)
    b = Value(6.8813735870195432, label='b')
    h1 = x1 * w1
    h1.label = 'h1'
    h2 = x2 * w2
    h2.label = 'h2'
    h = h1 + h2
    h.label = 'h'
    n = h + b
    n.label = 'n'
    return x1, x2, w1, w2, b, h1, h2, h, n


def test_large_backprop():
    """Drive the backward pass by hand, one node's _backward() at a time."""
    x1, x2, w1, w2, b, h1, h2, h, n = _make_neuron()
    y = n.tanh()
    y.label = 'y'

    # Seed the output gradient and propagate a single step through tanh.
    y.grad = 1.0
    y._backward()
    assert pytest.approx(n.grad, rel=0.001) == 0.5
    # h has not been reached yet, so its gradient is still zero.
    assert h.grad == 0.0

    n._backward()
    assert pytest.approx(b.grad, rel=0.001) == 0.5
    assert pytest.approx(h.grad, rel=0.001) == 0.5

    b._backward()
    h._backward()
    assert pytest.approx(h1.grad, rel=0.001) == 0.5
    assert pytest.approx(h2.grad, rel=0.001) == 0.5

    h1._backward()
    h2._backward()
    assert pytest.approx(x1.grad, rel=0.001) == -1.5
    assert pytest.approx(w1.grad, rel=0.001) == 1.0
    assert pytest.approx(x2.grad, rel=0.001) == 0.5
    assert pytest.approx(w2.grad, rel=0.001) == 0.0


def test_auto_diff():
    """backward() on the tanh output reproduces the hand-stepped gradients."""
    x1, x2, w1, w2, b, h1, h2, h, n = _make_neuron()
    y = n.tanh()
    y.label = 'y'

    y.backward()
    assert pytest.approx(n.grad, rel=0.001) == 0.5
    assert pytest.approx(b.grad, rel=0.001) == 0.5
    assert pytest.approx(h.grad, rel=0.001) == 0.5
    assert pytest.approx(h1.grad, rel=0.001) == 0.5
    assert pytest.approx(h2.grad, rel=0.001) == 0.5
    assert pytest.approx(x1.grad, rel=0.001) == -1.5
    assert pytest.approx(w1.grad, rel=0.001) == 1.0
    assert pytest.approx(x2.grad, rel=0.001) == 0.5
    assert pytest.approx(w2.grad, rel=0.001) == 0.0


def test_accumulation():
    """A node used twice accumulates its gradient: d(a + a)/da == 2."""
    a = Value(2.0, label='x1')
    b = a + a
    b.backward()
    assert a.grad == 2.0


def test_backward_tanh():
    """d tanh(x)/dx at x ~= 0.8814 is ~0.5."""
    x = Value(0.8814)
    y = x.tanh()
    y.backward()
    assert pytest.approx(x.grad, rel=0.1) == 0.5


def test_backward_exp():
    """d exp(x)/dx at x = 1 is e ~= 2.7."""
    x = Value(1.0)
    y = x.exp()
    y.backward()
    assert pytest.approx(x.grad, rel=0.1) == 2.7


def test_backward_pow():
    """d(x**2)/dx at x = 1 is 2."""
    x = Value(1.0)
    y = x ** 2
    y.backward()
    assert x.grad == 2.0


def test_backward_div():
    """d(a/b)/da at b = 2 is 1/b = 0.5."""
    a = Value(4.0)
    b = Value(2.0)
    c = a / b
    c.backward()
    assert a.grad == 0.5


def test_auto_diff_replace_tan_with_exp():
    """Expressing tanh via exp — (e^{2n}-1)/(e^{2n}+1) — yields identical grads."""
    x1, x2, w1, w2, b, h1, h2, h, n = _make_neuron()
    e = (2 * n).exp()
    y = (e - 1) / (e + 1)
    y.label = 'y'

    y.backward()
    assert pytest.approx(n.grad, rel=0.001) == 0.5
    assert pytest.approx(b.grad, rel=0.001) == 0.5
    assert pytest.approx(h.grad, rel=0.001) == 0.5
    assert pytest.approx(h1.grad, rel=0.001) == 0.5
    assert pytest.approx(h2.grad, rel=0.001) == 0.5
    assert pytest.approx(x1.grad, rel=0.001) == -1.5
    assert pytest.approx(w1.grad, rel=0.001) == 1.0
    assert pytest.approx(x2.grad, rel=0.001) == 0.5
    assert pytest.approx(w2.grad, rel=0.001) == 0.0