Finish micrograd main functions.

This commit is contained in:
PublicMatt
2024-03-18 19:45:15 -07:00
parent 4c9f7d8d7d
commit 5b0a350dd4
11 changed files with 399 additions and 16 deletions

View File

@@ -2,16 +2,6 @@ import pytest
from micrograd.engine import Value
def test_backward_tanh():
    """Seed y.grad and invoke the node-local _backward to propagate tanh's gradient.

    tanh(0.8814) ~= 0.7071, so d tanh/dx = 1 - tanh(x)**2 ~= 0.5.
    """
    # inputs
    x = Value(0.8814)
    y = x.tanh()
    # _backward propagates from an already-seeded output gradient.
    y.grad = 1.0
    y._backward()
    # pytest.approx must wrap the *expected* value so the tolerance is
    # computed relative to it (the original wrapped the actual value).
    assert x.grad == pytest.approx(0.5, rel=0.1)
def test_large_backprop():
# inputs
x1 = Value(2.0, label='x1')
@@ -107,3 +97,80 @@ def test_accumulation():
b = a + a
b.backward()
assert a.grad == 2.0
def test_backward_tanh():
    """Full backward() through tanh: d tanh/dx = 1 - tanh(x)**2 ~= 0.5 at x=0.8814."""
    # inputs
    x = Value(0.8814)
    y = x.tanh()
    y.backward()
    # pytest.approx must wrap the *expected* value so the tolerance is
    # computed relative to it (the original wrapped the actual value).
    assert x.grad == pytest.approx(0.5, rel=0.1)
def test_backward_exp():
    """d(exp(x))/dx = exp(x); at x=1 the gradient is e ~= 2.718."""
    # inputs
    x = Value(1.0)
    y = x.exp()
    y.backward()
    # pytest.approx must wrap the *expected* value so the tolerance is
    # computed relative to it (the original wrapped the actual value).
    assert x.grad == pytest.approx(2.7, rel=0.1)
def test_backward_pow():
    """d(x**2)/dx = 2x; at x=1 the gradient is exactly 2."""
    base = Value(1.0)
    squared = base ** 2
    squared.backward()
    assert base.grad == 2.0
def test_backward_div():
    """d(a/b)/da = 1/b; with b = 2 the gradient on a is 0.5."""
    numerator = Value(4.0)
    denominator = Value(2.0)
    quotient = numerator / denominator
    quotient.backward()
    assert numerator.grad == 0.5
def test_auto_diff_replace_tan_with_exp():
    """Build tanh from exp via tanh(n) = (e^(2n) - 1) / (e^(2n) + 1) and check grads.

    Reproduces the classic micrograd demo neuron n = x1*w1 + x2*w2 + b,
    with the bias chosen so tanh(n) ~= 0.7071 and d tanh/dn ~= 0.5.
    """
    # inputs
    x1 = Value(2.0, label='x1')
    x2 = Value(0.0, label='x2')
    # weights
    w1 = Value(-3.0, label='w1')
    w2 = Value(1.0, label='w2')
    # bias
    b = Value(6.8813735870195432, label='b')
    h1 = x1 * w1
    h1.label = 'h1'
    h2 = x2 * w2
    h2.label = 'h2'
    h = h1 + h2
    h.label = 'h'
    n = h + b
    n.label = 'n'
    # tanh expressed through exp so backprop flows through exp's rule only.
    e = (2*n).exp()
    y = (e - 1) / (e + 1)
    y.label = 'y'
    y.backward()
    # pytest.approx must wrap the *expected* value so the tolerance is
    # computed relative to it (the original wrapped the actual values).
    assert n.grad == pytest.approx(0.5, rel=0.001)
    assert b.grad == pytest.approx(0.5, rel=0.001)
    assert h.grad == pytest.approx(0.5, rel=0.001)
    assert h1.grad == pytest.approx(0.5, rel=0.001)
    assert h2.grad == pytest.approx(0.5, rel=0.001)
    assert x1.grad == pytest.approx(-1.5, rel=0.001)
    assert w1.grad == pytest.approx(1.0, rel=0.001)
    assert x2.grad == pytest.approx(0.5, rel=0.001)
    # A relative tolerance around an expected 0.0 is degenerate (0.1% of 0
    # is 0); use an absolute tolerance for the zero-gradient check.
    assert w2.grad == pytest.approx(0.0, abs=0.001)

66
test/test_nn.py Normal file
View File

@@ -0,0 +1,66 @@
from micrograd.nn import Neuron, Layer, MLP
from micrograd.engine import Value
import pytest
def test_init_neuron():
    """A Neuron built for n inputs holds n weights and maps a list to a Value."""
    fan_in = 2
    neuron = Neuron(fan_in)
    assert len(neuron.w) == fan_in
    output = neuron([1.0, 0.0])
    assert isinstance(output, Value)
def test_mismatch_number():
    """Feeding 2 inputs to a 7-input Neuron raises ValueError."""
    with pytest.raises(ValueError):
        too_few = [1.0, 0.0]
        wide_neuron = Neuron(7)
        wide_neuron(too_few)
def test_large_n_in():
    """A neuron with a wide fan-in still collapses its inputs to one Value."""
    width = 100
    wide_neuron = Neuron(width)
    result = wide_neuron([1.0] * width)
    assert isinstance(result, Value)
def test_well_known_weights():
    """Pin the weights and bias, then check the output against a hand-built tanh."""
    inputs = [1.0, 0.0]
    weights = [Value(0.0), Value(0.0)]
    bias = Value(0.0)
    neuron = Neuron(2)
    # Overwrite the random initialization with known parameters.
    neuron.w = weights
    neuron.b = bias
    actual = neuron(inputs)
    expected = sum([inputs[0] * weights[0], inputs[1] * weights[1], bias]).tanh()
    assert actual.data == expected.data
def test_mlp():
    """An MLP whose final layer holds 4 neurons returns 4 outputs."""
    net = MLP(3, [4, 4, 4])
    outputs = net([2.0, 3.0, -1.0])
    assert len(outputs) == 4
def test_mlp_single_out():
    """A single-neuron output layer collapses to a bare Value, not a list."""
    net = MLP(3, [4, 4, 1])
    output = net([2.0, 3.0, -1.0])
    assert isinstance(output, Value)
def test_sample_mlp():
    """Forward a small batch through an MLP and check the MSE loss is usable.

    The original version computed ``mse`` but asserted nothing, so it could
    never fail on a broken loss; assert the loss is a non-negative scalar.
    """
    n = MLP(3, [4, 4, 1])
    xs = [
        [2.0, 3.0, -1.0],
        [3.0, -1.0, 0.5],
        [0.5, 1.0, 1.0],
        [1.0, 1.0, 1.0],
    ]
    y_true = [1.0, -1.0, -1.0, 1.0]
    y_pred = [n(x) for x in xs]
    mse = sum([(y_p - y_t)**2 for y_p, y_t in zip(y_pred, y_true)])
    # A sum of squared errors must be a Value with non-negative data.
    assert isinstance(mse, Value)
    assert mse.data >= 0.0
def test_mlp_parameters():
    """A 3-input [4, 4, 1] MLP exposes 4*(3+1) + 4*(4+1) + 1*(4+1) == 41 parameters."""
    net = MLP(3, [4, 4, 1])
    params = net.parameters()
    assert len(params) == 41

View File

@@ -1,3 +1,4 @@
import pytest
from micrograd.engine import Value
@@ -11,21 +12,21 @@ def test_value_repr():
assert "Value(data=2.0)" == repr(v)
def test_value_add():
def test_value_add_opt():
    """Adding two Values yields the summed data and the matching repr."""
    lhs = Value(2.0)
    rhs = Value(4.0)
    total = lhs + rhs
    assert total.data == 6.0
    assert repr(lhs + rhs) == "Value(data=6.0)"
def test_value_sub():
def test_value_sub_opt():
    """Subtracting Values yields the difference and the matching repr."""
    lhs = Value(2.0)
    rhs = Value(4.0)
    diff = lhs - rhs
    assert diff.data == -2.0
    assert repr(lhs - rhs) == "Value(data=-2.0)"
def test_value_mul():
def test_value_mul_opt():
v1 = Value(2.0)
v2 = Value(4.0)
v3 = Value(-1.0)
@@ -33,6 +34,36 @@ def test_value_mul():
assert (v1 * v3).data == -2.0
def test_value_rmul_opt():
    """A plain int on the left of * is handled (reflected multiplication)."""
    val = Value(2.0)
    doubled = 2 * val
    assert doubled.data == 4.0
def test_value_pow_opt():
    """Raising a Value to an int power squares the data."""
    base = Value(2.0)
    squared = base ** 2
    assert squared.data == 4.0
def test_value_exp_opt():
    """Value.exp() at 1.0 produces e ~= 2.718."""
    a = Value(1.0)
    b = a.exp()
    # pytest.approx must wrap the *expected* value so the tolerance is
    # computed relative to it (the original wrapped the actual value).
    assert b.data == pytest.approx(2.7, rel=0.1)
def test_value_int_opt():
    """Subtracting a plain int from a Value works."""
    val = Value(2.0)
    decremented = val - 1
    assert decremented.data == 1.0
def test_value_div_opt():
    """Dividing a Value by a plain number works."""
    val = Value(2.0)
    halved = val / 2
    assert halved.data == 1.0
def test_value_mul_add():
v1 = Value(2.0)
v2 = Value(4.0)