Package com.github.neuralnetworks.architecture

Examples of com.github.neuralnetworks.architecture.NeuralNetworkImpl


  Environment.getInstance().setUseDataSharedMemory(true);
  Environment.getInstance().setUseWeightsSharedMemory(true);

  // CNN
  NeuralNetworkImpl cnn = NNFactory.convNN(new int[][] { { 2, 1, 1 }, { 1, 1 }, { 4 }, {1} }, false);
  cnn.setLayerCalculator(NNFactory.lcSigmoid(cnn, null));
  NNFactory.lcMaxPooling(cnn);
  FullyConnected cnnfci = (FullyConnected) cnn.getOutputLayer().getConnections().get(0).getInputLayer().getConnections().get(0);
  cnnfci.getWeights().set(0.02f, 0, 0);
  cnnfci.getWeights().set(0.01f, 1, 0);
  cnnfci.getWeights().set(0.03f, 2, 0);
  cnnfci.getWeights().set(0.001f, 3, 0);
  cnnfci.getWeights().set(0.005f, 0, 1);
  cnnfci.getWeights().set(0.04f, 1, 1);
  cnnfci.getWeights().set(0.02f, 2, 1);
  cnnfci.getWeights().set(0.009f, 3, 1);

  FullyConnected cnnfco = (FullyConnected) cnn.getOutputLayer().getConnections().get(0);
  cnnfco.getWeights().set(0.05f, 0, 0);
  cnnfco.getWeights().set(0.08f, 0, 1);

  // MLP
  NeuralNetworkImpl mlp = NNFactory.mlpSigmoid(new int[] { 2, 4, 1 }, false);

  FullyConnected mlpfci = (FullyConnected) mlp.getOutputLayer().getConnections().get(0).getInputLayer().getConnections().get(0);
  mlpfci.getWeights().set(0.02f, 0, 0);
  mlpfci.getWeights().set(0.01f, 1, 0);
  mlpfci.getWeights().set(0.03f, 2, 0);
  mlpfci.getWeights().set(0.001f, 3, 0);
  mlpfci.getWeights().set(0.005f, 0, 1);
  mlpfci.getWeights().set(0.04f, 1, 1);
  mlpfci.getWeights().set(0.02f, 2, 1);
  mlpfci.getWeights().set(0.009f, 3, 1);

  FullyConnected mlpfco = (FullyConnected) mlp.getOutputLayer().getConnections().get(0);
  mlpfco.getWeights().set(0.05f, 0, 0);
  mlpfco.getWeights().set(0.08f, 0, 1);

  // compare bp
  SimpleInputProvider inputProvider = new SimpleInputProvider(new float[][] { {0, 0}, {0, 1}, {1, 0}, {1, 1} }, new float[][] { {0}, {1}, {1}, {0} });
View Full Code Here


  assertEquals(18, tp2.get(s2).getStartIndex(), 0);
    }

    @Test
    public void testTensorProvider2() {
  NeuralNetworkImpl nn = new NeuralNetworkImpl();

  Layer i = new Layer();
  Layer h = new Layer();
  Layer o = new Layer();

  nn.addLayer(i);

  Environment.getInstance().setUseWeightsSharedMemory(true);
  ConnectionFactory cf = new ConnectionFactory();
  NNFactory.addFullyConnectedLayer(nn, h, cf, 2, 3, true);
  NNFactory.addFullyConnectedLayer(nn, o, cf, 4, 1, true);

  ValuesProvider tp = TensorFactory.tensorProvider(nn, 2, true);

  Matrix im = tp.get(nn.getInputLayer());
  Matrix hm1 = tp.get(h, 3, 2);
  Matrix hm2 = tp.get(h, 4, 2);

  Tensor om = tp.get(o);

  assertTrue(im == tp.get(i, 2, 2));
  assertTrue(im == tp.get(i));
  assertTrue(hm1 == tp.get(h, 3, 2));
  assertTrue(hm2 == tp.get(h, 4, 2));
  assertTrue(hm1 == TensorFactory.tensor(h, nn.getConnection(i, h), tp));
  assertTrue(hm2 == TensorFactory.tensor(h, nn.getConnection(h, o), tp));
  assertTrue(om == tp.get(o, 1, 2));
  assertTrue(om == tp.get(o));
    }
View Full Code Here

    @Test
    public void testTensorProvider3() {
  // simple mlp test
  Environment.getInstance().setUseWeightsSharedMemory(true);
  NeuralNetworkImpl nn = NNFactory.mlp(new int[] { 3, 4, 2 }, true);
  ValuesProvider tp = TensorFactory.tensorProvider(nn, 2, false);

  Matrix in = tp.get(nn.getInputLayer());
  Matrix hidden = tp.get(nn.getLayers().stream().filter(l -> l != nn.getInputLayer() && l != nn.getOutputLayer() && !Util.isBias(l)).findFirst().get());
  Matrix out = tp.get(nn.getOutputLayer());

  assertEquals(6, in.getElements().length, 0);
  assertEquals(3, in.getRows(), 0);
  assertEquals(2, in.getColumns(), 0);
  assertEquals(8, hidden.getElements().length, 0);
View Full Code Here

    }

    @Test
    public void testRandomInitializer() {
  Environment.getInstance().setUseWeightsSharedMemory(true);
  NeuralNetworkImpl nn = NNFactory.mlp(new int[] { 3, 2 }, true);
  NNRandomInitializer rand = new NNRandomInitializer(new MersenneTwisterRandomInitializer(-0.1f, 0.1f), 0.5f);
  rand.initialize(nn);

  for (Layer l : nn.getLayers()) {
      if (Util.isBias(l)) {
    Tensor t = ((FullyConnected) l.getConnections().get(0)).getWeights();
    float[] elements = t.getElements();
    t.forEach(i -> assertEquals(0.5, elements[i], 0f));
      } else {
    Tensor t = ((FullyConnected) l.getConnections().get(0)).getWeights();
    float[] elements = t.getElements();
    t.forEach(i -> assertTrue(elements[i] >= -0.1f && elements[i] <= 0.1f && elements[i] != 0));
      }
  }

  rand = new NNRandomInitializer(new MersenneTwisterRandomInitializer(2f, 3f), new MersenneTwisterRandomInitializer(-2f, -1f));
  rand.initialize(nn);

  for (Layer l : nn.getLayers()) {
      if (Util.isBias(l)) {
    Tensor t = ((FullyConnected) l.getConnections().get(0)).getWeights();
    float[] elements = t.getElements();
    t.forEach(i -> assertTrue(elements[i] >= -2f && elements[i] <= -1f));
      } else {
View Full Code Here

    }

    @Test
    public void testCNNConstruction2() {
  Environment.getInstance().setUseWeightsSharedMemory(true);
  NeuralNetworkImpl nn = NNFactory.convNN(new int[][] { { 28, 28, 1 }, { 5, 5, 20, 1 }, { 2, 2 }, { 5, 5, 50, 1 }, { 2, 2 }, {500}, {10} }, true);
  assertEquals(11, nn.getLayers().size(), 0);

  Conv2DConnection cc = (Conv2DConnection) nn.getInputLayer().getConnections().get(0);
  Layer l = nn.getInputLayer().getConnections().get(0).getOutputLayer();
  assertEquals(24, cc.getOutputFeatureMapRows(), 0);
  assertEquals(24, cc.getOutputFeatureMapColumns(), 0);
  assertEquals(20, cc.getOutputFilters(), 0);

  Subsampling2DConnection sc = (Subsampling2DConnection) l.getConnections().get(2);
View Full Code Here

    }

    @Test
    public void testCNNConstruction3() {
  Environment.getInstance().setUseWeightsSharedMemory(true);
  NeuralNetworkImpl nn = NNFactory.convNN(new int[][] { { 6, 6, 1 }, { 3, 3, 2, 2 }, { 2, 2 } }, true);
  assertEquals(4, nn.getLayers().size(), 0);

  Conv2DConnection cc = (Conv2DConnection) nn.getInputLayer().getConnections().get(0);
  Layer l = nn.getInputLayer().getConnections().get(0).getOutputLayer();
  assertEquals(2, cc.getOutputFeatureMapRows(), 0);
  assertEquals(2, cc.getOutputFeatureMapColumns(), 0);
  assertEquals(2, cc.getOutputFilters(), 0);

  Subsampling2DConnection sc = (Subsampling2DConnection) l.getConnections().get(2);
View Full Code Here

  assertEquals(2, sc.getFilters(), 0);
    }

    public void testCNNLayerCalculatorConstruction() {
  Environment.getInstance().setUseWeightsSharedMemory(true);
  NeuralNetworkImpl nn = NNFactory.convNN(new int[][] { { 28, 28, 1 }, { 5, 5, 20, 1 }, { 2, 2 }, { 5, 5, 50, 1 }, { 2, 2 }, {500}, {10} }, true);
  nn.setLayerCalculator(NNFactory.lcSigmoid(nn, null));
  NNFactory.lcMaxPooling(nn);

  // feedforwad cc
  LayerCalculatorImpl lc = (LayerCalculatorImpl) nn.getLayerCalculator();

  Layer l = nn.getInputLayer();

  assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorConv);

  l = l.getConnections().get(0).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorConv);

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof AparapiMaxPooling2D);

  l = l.getConnections().get(1).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorConv);

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof AparapiMaxPooling2D);

  l = l.getConnections().get(1).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorFullyConnected);

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorFullyConnected);

  // backpropagation cc
  BackPropagationTrainer<?> bpt = TrainerFactory.backPropagation(nn, null, null, null, null, 0.01f, 0.5f, 0f, 0f, 0f, 1, 1, 1);
  BackPropagationLayerCalculatorImpl bplc = (BackPropagationLayerCalculatorImpl) bpt.getBPLayerCalculator();

  l = nn.getInputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackPropagationConv2DSigmoid);

  l = l.getConnections().get(0).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackpropagationMaxPooling2D);
  assertTrue(bplc.getConnectionCalculator(l.getConnections().get(1).getInputLayer()) instanceof BackPropagationConv2D);   // bias

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackPropagationConv2DSigmoid);

  l = l.getConnections().get(1).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackpropagationMaxPooling2D);
  assertTrue(bplc.getConnectionCalculator(l.getConnections().get(1).getInputLayer()) instanceof BackPropagationConv2D);

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackPropagationSigmoid);

  l = l.getConnections().get(1).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l.getConnections().get(1).getInputLayer()) instanceof BackPropagationSigmoid);
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackPropagationSigmoid);

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l.getConnections().get(1).getInputLayer()) instanceof BackPropagationSigmoid);
  assertTrue(bplc.getConnectionCalculator(l) == null);

  // simple convolutional network
  Environment.getInstance().setUseWeightsSharedMemory(true);
  nn = NNFactory.convNN(new int[][] { { 28, 28, 1 }, { 1, 1 }, {10} }, false);
  nn.setLayerCalculator(NNFactory.lcSigmoid(nn, null));
  NNFactory.lcMaxPooling(nn);

  l = nn.getInputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorConv);

  l = l.getConnections().get(0).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof AparapiSubsampling2D);

  l = l.getConnections().get(0).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof AparapiSigmoid);

  bpt = TrainerFactory.backPropagation(nn, null, null, new MultipleNeuronsOutputError(), null, 0.02f, 0.5f, 0f, 0f, 0f, 1, 1, 1);
  bplc = (BackPropagationLayerCalculatorImpl) bpt.getBPLayerCalculator();

  l = nn.getInputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof BackpropagationMaxPooling2D);

  l = l.getConnections().get(0).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof BackPropagationSigmoid);
    }
View Full Code Here

    }

    @Test
    public void testSimpleCNN() {
  Environment.getInstance().setUseWeightsSharedMemory(true);
  NeuralNetworkImpl nn = NNFactory.convNN(new int[][] {{3, 3, 2}, {2, 2, 2, 1}, {2, 2}}, false);
  nn.setLayerCalculator(NNFactory.lcWeightedSum(nn, null));
  NNFactory.lcMaxPooling(nn);

  Conv2DConnection c = (Conv2DConnection) nn.getInputLayer().getConnections().get(0);
  c.getWeights().setElements(new float[] {1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4});

  ValuesProvider vp = TensorFactory.tensorProvider(nn, 1, true);
  TensorIterator it = vp.get(c.getInputLayer()).iterator();
  for (int i = 0; i < vp.get(c.getInputLayer()).getSize(); i++) {
      vp.get(c.getInputLayer()).getElements()[it.next()] = i + 1;
  }

  Set<Layer> calculatedLayers = new HashSet<>();
  calculatedLayers.add(nn.getInputLayer());
  nn.getLayerCalculator().calculate(nn, nn.getOutputLayer(), calculatedLayers, vp);

  Tensor o = vp.get(nn.getOutputLayer());

  assertEquals(244, o.get(0, 0, 0, 0), 0);
  assertEquals(244, o.get(1, 0, 0, 0), 0);
    }
View Full Code Here

    @Test
    public void testCNNBackpropagation() {
  //Environment.getInstance().setExecutionMode(EXECUTION_MODE.SEQ);

  Environment.getInstance().setUseWeightsSharedMemory(true);
  NeuralNetworkImpl nn = NNFactory.convNN(new int[][] { { 3, 3, 2 }, { 2, 2, 1, 1 } }, true);
  nn.setLayerCalculator(NNFactory.lcSigmoid(nn, null));

  Conv2DConnection c = (Conv2DConnection) nn.getInputLayer().getConnections().get(0);
  TensorIterator it = c.getWeights().iterator();
  float x = 0.1f;
  while (it.hasNext()) {
      c.getWeights().getElements()[it.next()] = x;
      x += 0.1f;
  }

  Conv2DConnection b = (Conv2DConnection) nn.getOutputLayer().getConnections().get(1);
  b.getWeights().getElements()[b.getWeights().getStartIndex()] = -3f;

  SimpleInputProvider ts = new SimpleInputProvider(new float[][] { { 0.1f, 0.2f, 0.3f, 0.4f, 0.5f, 0.6f, 0.7f, 0.8f, 0.9f, 1, 1.1f, 1.2f, 1.3f, 1.4f, 1.5f, 1.6f, 1.7f, 1.8f } }, new float[][] { { 1, 1, 1, 1 } });
  BackPropagationTrainer<?> t = TrainerFactory.backPropagation(nn, ts, null, null, null, 0.5f, 0f, 0f, 0f, 0f, 1, 1, 1);
  t.train();
View Full Code Here

    }

    @Test
    public void testCNNBackpropagation2() {
  Environment.getInstance().setUseWeightsSharedMemory(true);
  NeuralNetworkImpl nn = NNFactory.convNN(new int[][] { {2, 1, 1}, {1, 1}, {2}, {2}, {1} }, false);
  nn.setLayerCalculator(NNFactory.lcSigmoid(nn, null));
  NNFactory.lcMaxPooling(nn);

  FullyConnected c1 = (FullyConnected) nn.getInputLayer().getConnections().get(0).getOutputLayer().getConnections().get(1).getOutputLayer().getConnections().get(1);
  Matrix cg1 = c1.getWeights();
  cg1.set(0.1f, 0, 0);
  cg1.set(0.8f, 0, 1);
  cg1.set(0.4f, 1, 0);
  cg1.set(0.6f, 1, 1);

  FullyConnected c2 = (FullyConnected) nn.getOutputLayer().getConnections().iterator().next();
  Matrix cg2 = c2.getWeights();
  cg2.set(0.3f, 0, 0);
  cg2.set(0.9f, 0, 1);

  BackPropagationTrainer<?> bpt = TrainerFactory.backPropagation(nn, new SimpleInputProvider(new float[][] { { 0.35f, 0.9f } }, new float[][] { { 0.5f } }), new SimpleInputProvider(new float[][] { { 0.35f, 0.9f } }, new float[][] { { 0.5f } }), null, null, 1f, 0f, 0f, 0f, 0f, 1, 1, 1);
View Full Code Here

TOP

Related Classes of com.github.neuralnetworks.architecture.NeuralNetworkImpl

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.