add backprop.
This commit is contained in:
104
test/test_backprop.py
Normal file
104
test/test_backprop.py
Normal file
@@ -0,0 +1,104 @@
|
||||
import pytest
|
||||
from micrograd.engine import Value
|
||||
|
||||
|
||||
def test_backward_tanh():
    """Single backward step through tanh: d/dx tanh(x) = 1 - tanh(x)**2.

    At x = 0.8814, tanh(x) ~= 0.7071, so the local gradient is ~0.5.
    """
    x = Value(0.8814)
    y = x.tanh()

    # Seed the output gradient, then propagate one node back.
    y.grad = 1.0
    y._backward()

    # Idiom fix: pytest.approx wraps the *expected* value; the second
    # positional argument of approx is the relative tolerance.
    assert x.grad == pytest.approx(0.5, rel=0.1)
def test_large_backprop():
    """Manually backpropagate through a single neuron, one node at a time.

    Graph: y = tanh(x1*w1 + x2*w2 + b).  The bias is chosen so that the
    pre-activation n ~= 0.8814, giving dtanh/dn ~= 0.5.  Each `_backward`
    call pushes gradients exactly one level down, and the asserts check
    the hand-computed chain-rule values at every stage.
    """
    # inputs
    x1 = Value(2.0, label='x1')
    x2 = Value(0.0, label='x2')

    # weights
    w1 = Value(-3.0, label='w1')
    w2 = Value(1.0, label='w2')

    # bias
    b = Value(6.8813735870195432, label='b')

    h1 = x1 * w1
    h1.label = 'h1'
    h2 = x2 * w2
    h2.label = 'h2'

    h = h1 + h2
    h.label = 'h'

    n = h + b
    n.label = 'n'
    y = n.tanh()
    y.label = 'y'

    # Seed the output and step back through tanh.
    y.grad = 1.0
    y._backward()

    # Idiom fix throughout: approx wraps the expected value (rel tolerance).
    assert n.grad == pytest.approx(0.5, rel=0.001)
    # h has not received a gradient yet -- only n._backward() sets it.
    assert h.grad == 0.0

    n._backward()
    assert b.grad == pytest.approx(0.5, rel=0.001)
    assert h.grad == pytest.approx(0.5, rel=0.001)

    b._backward()
    h._backward()
    assert h1.grad == pytest.approx(0.5, rel=0.001)
    assert h2.grad == pytest.approx(0.5, rel=0.001)

    h1._backward()
    h2._backward()
    # d(h1)/d(x1) = w1 = -3.0, times upstream 0.5 -> -1.5
    assert x1.grad == pytest.approx(-1.5, rel=0.001)
    # d(h1)/d(w1) = x1 = 2.0, times upstream 0.5 -> 1.0
    assert w1.grad == pytest.approx(1.0, rel=0.001)

    # d(h2)/d(x2) = w2 = 1.0 -> 0.5; d(h2)/d(w2) = x2 = 0.0 -> 0.0
    assert x2.grad == pytest.approx(0.5, rel=0.001)
    assert w2.grad == pytest.approx(0.0, rel=0.001)
@pytest.mark.skip(reason="non-deterministic")
def test_auto_diff():
    """Same neuron as test_large_backprop, but via the full `backward()` pass.

    NOTE(review): the original asserted `h.grad == 0.0` immediately after
    `y.backward()` and then `h.grad ~= 0.5` a few lines later -- mutually
    exclusive after a *full* backward pass.  The `== 0.0` line was a stale
    copy from the stepwise test and guaranteed failure; it is removed here.
    """
    # inputs
    x1 = Value(2.0, label='x1')
    x2 = Value(0.0, label='x2')

    # weights
    w1 = Value(-3.0, label='w1')
    w2 = Value(1.0, label='w2')

    # bias
    b = Value(6.8813735870195432, label='b')

    h1 = x1 * w1
    h1.label = 'h1'
    h2 = x2 * w2
    h2.label = 'h2'

    h = h1 + h2
    h.label = 'h'

    n = h + b
    n.label = 'n'
    y = n.tanh()
    y.label = 'y'

    # Full automatic backward pass populates every node's gradient.
    y.backward()
    # Idiom fix throughout: approx wraps the expected value (rel tolerance).
    assert n.grad == pytest.approx(0.5, rel=0.001)

    assert b.grad == pytest.approx(0.5, rel=0.001)
    assert h.grad == pytest.approx(0.5, rel=0.001)

    assert h1.grad == pytest.approx(0.5, rel=0.001)
    assert h2.grad == pytest.approx(0.5, rel=0.001)

    assert x1.grad == pytest.approx(-1.5, rel=0.001)
    assert w1.grad == pytest.approx(1.0, rel=0.001)

    assert x2.grad == pytest.approx(0.5, rel=0.001)
    assert w2.grad == pytest.approx(0.0, rel=0.001)
31
test/test_neuron.py
Normal file
31
test/test_neuron.py
Normal file
@@ -0,0 +1,31 @@
|
||||
import pytest
|
||||
from micrograd.engine import Value
|
||||
|
||||
|
||||
# @pytest.mark.skip(reason="complicated assertion")
def test_big_neuron():
    """Forward pass through one neuron: y = tanh(x1*w1 + x2*w2 + b).

    The bias makes the pre-activation ~0.8814, so tanh gives ~0.7071.
    """
    # inputs
    x1 = Value(2.0, label='x1')
    x2 = Value(0.0, label='x2')

    # weights
    w1 = Value(-3.0, label='w1')
    w2 = Value(1.0, label='w2')

    # bias
    b = Value(6.8813735870195432, label='b')

    h1 = x1 * w1
    h1.label = 'h1'
    h2 = x2 * w2
    h2.label = 'h2'

    h = h1 + h2
    h.label = 'h'

    n = h + b
    n.label = 'n'
    y = n.tanh()
    y.label = 'y'

    # Idiom fix: pytest.approx wraps the expected value (rel tolerance).
    assert y.data == pytest.approx(0.7071, rel=0.01)
@@ -56,3 +56,16 @@ def test_operations():
|
||||
add = v1 + v2
|
||||
assert mul._op == '*'
|
||||
assert add._op == '+'
|
||||
|
||||
|
||||
def test_tanh():
    """tanh maps positives into (0, 1), zero to zero, negatives into (-1, 0)."""
    positive = Value(2.0).tanh()
    assert 0 < positive.data < 1

    # tanh(0.0) is exactly 0.0, so strict equality is safe here.
    zero = Value(0.0).tanh()
    assert zero.data == 0

    negative = Value(-2.0).tanh()
    assert -1 < negative.data < 0
Reference in New Issue
Block a user