Skip to content

Commit

Permalink
fixup! Add test for dummy population axons
Browse files — browse the repository at this point in the history
  • Loading branch information
hunse committed Nov 26, 2019
1 parent 8999ee7 commit c6a8d04
Showing 1 changed file with 24 additions and 7 deletions.
31 changes: 24 additions & 7 deletions nengo_loihi/tests/test_conv.py
Original file line number Diff line number Diff line change
Expand Up @@ -1075,16 +1075,26 @@ def test_conv_overlap_input(Simulator, plt):


@pytest.mark.target_loihi
@pytest.mark.parametrize("on_chip", [True, False])
@pytest.mark.parametrize("precompute", [True, False])
@pytest.mark.parametrize("pop_type", [16, 32])
@pytest.mark.parametrize("channels_last", [True, False])
def test_population_axons(precompute, pop_type, channels_last, Simulator, rng):
"""On the chip, dummy axons were still having an effect. Check this is fixed."""
def test_chip_population_axons(
on_chip, precompute, pop_type, channels_last, Simulator, rng
):
"""Check that all types of population axons work as inputs or between cores.
Also, on the chip, dummy axons were still having an effect. Check this is fixed.
"""

def conv_layer(input=None, *args, label=None, **kwargs):
    """Build one convolutional layer for the test network.

    Constructs a ``nengo.Convolution`` transform from ``*args``/``**kwargs``
    and an ensemble sized to the transform's output. When ``input`` is
    provided, the transform is applied on a connection from ``input`` to the
    ensemble's neurons; otherwise no connection is made (the layer is driven
    some other way, e.g. via biases set on the chip).

    Returns the tuple ``(layer, conv, conn)``, where ``conn`` is ``None``
    when no ``input`` was given.
    """
    transform = nengo.Convolution(*args, **kwargs)
    ensemble = nengo.Ensemble(transform.output_shape.size, 1, label=label)
    if input is None:
        connection = None
    else:
        connection = nengo.Connection(input, ensemble.neurons, transform=transform)
    return ensemble, transform, connection

if pop_type == 16 and not channels_last:
Expand All @@ -1102,7 +1112,7 @@ def conv_layer(x, *args, label=None, **kwargs):
)
X = rng.uniform(0.2, 1, size=input_shape.shape)
kernel0 = rng.uniform(0.2, 1, size=(1, 1, 1, n_filters0))
kernel1 = rng.uniform(0.2, 1, size=(3, 3, n_filters0, n_filters1))
kernel1 = rng.uniform(0.1, 0.5, size=(3, 3, n_filters0, n_filters1))

with nengo.Network(seed=0) as net:
nengo_loihi.add_params(net)
Expand All @@ -1113,7 +1123,7 @@ def conv_layer(x, *args, label=None, **kwargs):
net.config[nengo.Ensemble].intercepts = nengo.dists.Choice([0])
net.config[nengo.Connection].synapse = 0.005

inp = nengo.Node(X.ravel())
inp = nengo.Node(X.ravel()) if not on_chip else None

# first layer is off-chip to translate the inputs into spikes
layer0, conv0, _ = conv_layer(
Expand All @@ -1125,7 +1135,14 @@ def conv_layer(x, *args, label=None, **kwargs):
init=kernel0,
label="layer0",
)
net.config[layer0].on_chip = False

net.config[layer0].on_chip = on_chip
if on_chip:
assert kernel0.shape[:2] == (1, 1)
w = kernel0[0, 0]
Y = X.dot(w) if channels_last else np.tensordot(w.T, X, axes=1)
layer0.gain = nengo.dists.Choice([0.0])
layer0.bias = Y.ravel() * max_rate

layer1, conv1, conn1 = conv_layer(
layer0.neurons,
Expand Down

0 comments on commit c6a8d04

Please sign in to comment.