Package com.github.neuralnetworks.training.backpropagation

Examples of com.github.neuralnetworks.training.backpropagation.BackPropagationLayerCalculatorImpl


     * @return
     */
    public static BackPropagationTrainer<?> backPropagation(NeuralNetworkImpl nn, TrainingInputProvider trainingSet, TrainingInputProvider testingSet, OutputError error, NNRandomInitializer rand, float learningRate, float momentum, float l1weightDecay, float l2weightDecay) {
  BackPropagationTrainer<?> t = new BackPropagationTrainer<NeuralNetwork>(backpropProperties(nn, trainingSet, testingSet, error, rand, learningRate, momentum, l1weightDecay, l2weightDecay));

  BackPropagationLayerCalculatorImpl bplc = bplc(nn, t.getProperties());
  t.getProperties().setParameter(Constants.BACKPROPAGATION, bplc);

  return t;
    }
View Full Code Here


  return t;
    }

    private static BackPropagationLayerCalculatorImpl bplc(NeuralNetworkImpl nn, Properties p) {
  BackPropagationLayerCalculatorImpl blc = new BackPropagationLayerCalculatorImpl();
  LayerCalculatorImpl lc = (LayerCalculatorImpl) nn.getLayerCalculator();

  List<ConnectionCandidate> connections = new BreadthFirstOrderStrategy(nn, nn.getOutputLayer()).order();

  if (connections.size() > 0) {
      Layer current = null;
      List<Connections> chunk = new ArrayList<>();
      Set<Layer> convCalculatedLayers = new HashSet<>(); // tracks
                     // convolutional
                     // layers
                     // (because their
                     // calculations
                     // are
                     // interlinked)
      convCalculatedLayers.add(nn.getOutputLayer());

      for (int i = 0; i < connections.size(); i++) {
    ConnectionCandidate c = connections.get(i);
    chunk.add(c.connection);

    if (i == connections.size() - 1 || connections.get(i + 1).target != c.target) {
        current = c.target;

        ConnectionCalculator result = null;
        ConnectionCalculator ffcc = null;
        if (Util.isBias(current)) {
      ffcc = lc.getConnectionCalculator(current.getConnections().get(0).getOutputLayer());
        } else if (Util.isConvolutional(current) || Util.isSubsampling(current)) {
      if (chunk.size() != 1) {
          throw new IllegalArgumentException("Convolutional layer with more than one connection");
      }

      ffcc = lc.getConnectionCalculator(Util.getOppositeLayer(chunk.iterator().next(), current));
        } else {
      ffcc = lc.getConnectionCalculator(current);
        }

        if (ffcc instanceof AparapiSigmoid) {
      result = new BackPropagationSigmoid(p);
        } else if (ffcc instanceof AparapiTanh) {
      result = new BackPropagationTanh(p);
        } else if (ffcc instanceof AparapiSoftReLU) {
      result = new BackPropagationSoftReLU(p);
        } else if (ffcc instanceof AparapiReLU) {
      result = new BackPropagationReLU(p);
        } else if (ffcc instanceof AparapiMaxPooling2D || ffcc instanceof AparapiStochasticPooling2D) {
      result = new BackpropagationMaxPooling2D();
        } else if (ffcc instanceof AparapiAveragePooling2D) {
      result = new BackpropagationAveragePooling2D();
        } else if (ffcc instanceof ConnectionCalculatorConv) {
      Layer opposite = Util.getOppositeLayer(chunk.iterator().next(), current);
      if (!convCalculatedLayers.contains(opposite)) {
          convCalculatedLayers.add(opposite);

          if (ffcc instanceof AparapiConv2DSigmoid) {
        result = new BackPropagationConv2DSigmoid(p);
          } else if (ffcc instanceof AparapiConv2DTanh) {
        result = new BackPropagationConv2DTanh(p);
          } else if (ffcc instanceof AparapiConv2DSoftReLU) {
        result = new BackPropagationConv2DSoftReLU(p);
          } else if (ffcc instanceof AparapiConv2DReLU) {
        result = new BackPropagationConv2DReLU(p);
          }
      } else {
          result = new BackPropagationConv2D(p);
      }
        }

        if (result != null) {
      blc.addConnectionCalculator(current, result);
        }

        chunk.clear();
    }
      }
View Full Code Here

    }

    public static BackPropagationAutoencoder backPropagationAutoencoder(NeuralNetworkImpl nn, TrainingInputProvider trainingSet, TrainingInputProvider testingSet, OutputError error, NNRandomInitializer rand, float learningRate, float momentum, float l1weightDecay, float l2weightDecay, float inputCorruptionRate) {
  BackPropagationAutoencoder t = new BackPropagationAutoencoder(backpropProperties(nn, trainingSet, testingSet, error, rand, learningRate, momentum, l1weightDecay, l2weightDecay));

  BackPropagationLayerCalculatorImpl bplc = bplc(nn, t.getProperties());
  t.getProperties().setParameter(Constants.BACKPROPAGATION, bplc);

  return t;
    }
View Full Code Here

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorFullyConnected);

  // backpropagation cc
  BackPropagationTrainer<?> bpt = TrainerFactory.backPropagation(nn, null, null, null, null, 0.01f, 0.5f, 0f, 0f);
  BackPropagationLayerCalculatorImpl bplc = (BackPropagationLayerCalculatorImpl) bpt.getBPLayerCalculator();

  l = nn.getInputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackPropagationConv2DSigmoid);

  l = l.getConnections().get(0).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackpropagationMaxPooling2D);
  assertTrue(bplc.getConnectionCalculator(l.getConnections().get(1).getInputLayer()) instanceof BackPropagationConv2D);   // bias

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackPropagationConv2DSigmoid);

  l = l.getConnections().get(1).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackpropagationMaxPooling2D);
  assertTrue(bplc.getConnectionCalculator(l.getConnections().get(1).getInputLayer()) instanceof BackPropagationConv2D);

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackPropagationSigmoid);

  l = l.getConnections().get(1).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l.getConnections().get(1).getInputLayer()) instanceof BackPropagationSigmoid);
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackPropagationSigmoid);

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l.getConnections().get(1).getInputLayer()) instanceof BackPropagationSigmoid);
  assertTrue(bplc.getConnectionCalculator(l) == null);

  // simple convolutional network
  nn = NNFactory.convNN(new int[][] { { 28, 28, 1 }, { 1, 1 }, {10} }, false);
  nn.setLayerCalculator(NNFactory.lcSigmoid(nn, null));
  NNFactory.lcMaxPooling(nn);
View Full Code Here

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorFullyConnected);

  // backpropagation cc
  BackPropagationTrainer<?> bpt = TrainerFactory.backPropagation(nn, null, null, null, null, 0.01f, 0.5f, 0f, 0f, 0f, 1, 1, 1);
  BackPropagationLayerCalculatorImpl bplc = (BackPropagationLayerCalculatorImpl) bpt.getBPLayerCalculator();

  l = nn.getInputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackPropagationConv2DSigmoid);

  l = l.getConnections().get(0).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackpropagationMaxPooling2D);
  assertTrue(bplc.getConnectionCalculator(l.getConnections().get(1).getInputLayer()) instanceof BackPropagationConv2D);   // bias

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackPropagationConv2DSigmoid);

  l = l.getConnections().get(1).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackpropagationMaxPooling2D);
  assertTrue(bplc.getConnectionCalculator(l.getConnections().get(1).getInputLayer()) instanceof BackPropagationConv2D);

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackPropagationSigmoid);

  l = l.getConnections().get(1).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l.getConnections().get(1).getInputLayer()) instanceof BackPropagationSigmoid);
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackPropagationSigmoid);

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l.getConnections().get(1).getInputLayer()) instanceof BackPropagationSigmoid);
  assertTrue(bplc.getConnectionCalculator(l) == null);

  // simple convolutional network
  Environment.getInstance().setUseWeightsSharedMemory(true);
  nn = NNFactory.convNN(new int[][] { { 28, 28, 1 }, { 1, 1 }, {10} }, false);
  nn.setLayerCalculator(NNFactory.lcSigmoid(nn, null));
View Full Code Here

  return new BackPropagationTrainer<NeuralNetwork>(p);
    }

    private static BackPropagationLayerCalculatorImpl bplc(NeuralNetworkImpl nn, Properties p) {
  BackPropagationLayerCalculatorImpl blc = new BackPropagationLayerCalculatorImpl();
  LayerCalculatorImpl lc = (LayerCalculatorImpl) nn.getLayerCalculator();

  List<ConnectionCandidate> connections = new BreadthFirstOrderStrategy(nn, nn.getOutputLayer()).order();

  if (connections.size() > 0) {
      Layer current = null;
      List<Connections> chunk = new ArrayList<>();
      Set<Layer> convCalculatedLayers = new HashSet<>(); // tracks
                     // convolutional
                     // layers
                     // (because their
                     // calculations
                     // are
                     // interlinked)
      convCalculatedLayers.add(nn.getOutputLayer());

      for (int i = 0; i < connections.size(); i++) {
    ConnectionCandidate c = connections.get(i);
    chunk.add(c.connection);

    if (i == connections.size() - 1 || connections.get(i + 1).target != c.target) {
        current = c.target;

        ConnectionCalculator result = null;
        ConnectionCalculator ffcc = null;
        if (Util.isBias(current)) {
      ffcc = lc.getConnectionCalculator(current.getConnections().get(0).getOutputLayer());
        } else if (Util.isConvolutional(current) || Util.isSubsampling(current)) {
      if (chunk.size() != 1) {
          throw new IllegalArgumentException("Convolutional layer with more than one connection");
      }

      ffcc = lc.getConnectionCalculator(Util.getOppositeLayer(chunk.iterator().next(), current));
        } else {
      ffcc = lc.getConnectionCalculator(current);
        }

        if (ffcc instanceof AparapiSigmoid) {
      result = new BackPropagationSigmoid(p);
        } else if (ffcc instanceof AparapiTanh) {
      result = new BackPropagationTanh(p);
        } else if (ffcc instanceof AparapiSoftReLU) {
      result = new BackPropagationSoftReLU(p);
        } else if (ffcc instanceof AparapiReLU) {
      result = new BackPropagationReLU(p);
        } else if (ffcc instanceof AparapiMaxout) {
      result = new BackpropagationMaxout(p);
        } else if (ffcc instanceof AparapiMaxPooling2D || ffcc instanceof AparapiStochasticPooling2D) {
      result = new BackpropagationMaxPooling2D();
        } else if (ffcc instanceof AparapiAveragePooling2D) {
      result = new BackpropagationAveragePooling2D();
        } else if (ffcc instanceof ConnectionCalculatorConv) {
      Layer opposite = Util.getOppositeLayer(chunk.iterator().next(), current);
      if (!convCalculatedLayers.contains(opposite)) {
          convCalculatedLayers.add(opposite);

          if (ffcc instanceof AparapiConv2DSigmoid) {
        result = new BackPropagationConv2DSigmoid(p);
          } else if (ffcc instanceof AparapiConv2DTanh) {
        result = new BackPropagationConv2DTanh(p);
          } else if (ffcc instanceof AparapiConv2DSoftReLU) {
        result = new BackPropagationConv2DSoftReLU(p);
          } else if (ffcc instanceof AparapiConv2DReLU) {
        result = new BackPropagationConv2DReLU(p);
          }
      } else {
          result = new BackPropagationConv2D(p);
      }
        }

        if (result != null) {
      blc.addConnectionCalculator(current, result);
        }

        chunk.clear();
    }
      }
View Full Code Here

TOP

Related Classes of com.github.neuralnetworks.training.backpropagation.BackPropagationLayerCalculatorImpl

Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.