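"""Tests for micrograd's Neuron and MLP: construction, forward passes,
input-size validation, a hand-checkable activation, and parameter
counting."""
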
import pytest

from micrograd.engine import Value
from micrograd.nn import Layer, MLP, Neuron


def test_init_neuron():
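    # A freshly constructed neuron should hold one weight per input
    # and produce a single scalar Value when called.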
    inputs = 2
    x = [1.0, 0.0]
    n = Neuron(inputs)
    assert len(n.w) == inputs
    y = n(x)
    assert isinstance(y, Value)


def test_mismatch_number():
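    # Calling a 7-input neuron with a 2-element vector should raise.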
    x = [1.0, 0.0]
    n = Neuron(7)
    # only the call itself should be able to satisfy the raises check
    with pytest.raises(ValueError):
        n(x)


def test_large_n_in():
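    # A wide neuron (100 inputs) still reduces to a single Value.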
    n_in = 100
    x = [1.0] * n_in
    n = Neuron(n_in)
    y = n(x)
    assert isinstance(y, Value)


def test_well_known_weights():
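    # With all-zero weights and bias, the pre-activation is 0, so the
    # output should equal tanh(0) = 0 and can be computed by hand.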
    x = [1.0, 0.0]
    w = [Value(0.0), Value(0.0)]
    b = Value(0.0)

    n = Neuron(2)
    n.w = w
    n.b = b
    y = n(x)

    expected = sum([x[0] * w[0], x[1] * w[1], b]).tanh()
    assert y.data == pytest.approx(expected.data)


def test_mlp():
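    # The final layer of MLP(3, [4, 4, 4]) has 4 neurons, so the
    # forward pass returns a list of 4 Values.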
    x = [2.0, 3.0, -1.0]
    n = MLP(3, [4, 4, 4])
    y = n(x)
    assert len(y) == 4


def test_mlp_single_out():
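    # A single-neuron output layer returns a bare Value, not a list.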
    x = [2.0, 3.0, -1.0]
    n = MLP(3, [4, 4, 1])
    y = n(x)
    assert isinstance(y, Value)


def test_sample_mlp():
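    # Run a small batch through the network and compute a squared-error
    # loss over the predictions.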
    n = MLP(3, [4, 4, 1])
    xs = [
        [2.0, 3.0, -1.0],
        [3.0, -1.0, 0.5],
        [0.5, 1.0, 1.0],
        [1.0, 1.0, 1.0],
    ]
    y_true = [1.0, -1.0, -1.0, 1.0]
    y_pred = [n(x) for x in xs]
    # despite the name, this is a sum of squared errors (no mean taken)
    mse = sum([(y_p - y_t) ** 2 for y_p, y_t in zip(y_pred, y_true)])
    # a squared-error sum must be a non-negative Value; assert it so
    # the test can actually fail
    assert isinstance(mse, Value)
    assert mse.data >= 0.0


def test_mlp_parameters():
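    # MLP(3, [4, 4, 1]) parameter count:
    #   layer 1: 4 neurons * (3 weights + 1 bias) = 16
    #   layer 2: 4 neurons * (4 weights + 1 bias) = 20
    #   layer 3: 1 neuron  * (4 weights + 1 bias) =  5
    # total: 41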
    n = MLP(3, [4, 4, 1])
    assert len(n.parameters()) == 41