Examples of NeuralNetwork


Examples of com.github.neuralnetworks.architecture.NeuralNetwork

      isTraining = false;
  } else if (event instanceof MiniBatchFinishedEvent && isTraining) {
      MiniBatchFinishedEvent mbe = (MiniBatchFinishedEvent) event;
      if (mbe.getBatchCount() % validationFrequency == 0) {
          OneStepTrainer<?> t = (OneStepTrainer<?>) event.getSource();
          NeuralNetwork n = t.getNeuralNetwork();

          if (n.getLayerCalculator() != null) {
              Set<Layer> calculatedLayers = new UniqueList<>();
              TrainingInputData input = null;
              OutputError outputError = t.getOutputError();
              outputError.reset();
              inputProvider.reset();

              // Forward-propagate every validation example and accumulate the error.
              while ((input = inputProvider.getNextInput()) != null) {
                  calculatedLayers.clear();
                  calculatedLayers.add(n.getInputLayer());
                  ValuesProvider vp = mbe.getResults();
                  if (vp == null) {
                      vp = new ValuesProvider();
                  }

                  vp.addValues(n.getInputLayer(), input.getInput());
                  n.getLayerCalculator().calculate(n, n.getOutputLayer(), calculatedLayers, vp);

                  outputError.addItem(vp.getValues(n.getOutputLayer()), input.getTarget());
              }

              // Stop training early once the aggregated error is acceptable.
              float e = outputError.getTotalNetworkError();
              if (e <= acceptanceError) {
                  System.out.println("Stopping at error " + e + " (" + (e * 100) + "%) for " + mbe.getBatchCount() + " minibatches");
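A listener like this is registered on the trainer before training starts. The sketch below is hypothetical glue code: ValidationListener stands in for the class the snippet belongs to, and its constructor arguments mirror the inputProvider, validationFrequency, and acceptanceError fields it references; only addEventListener(...) and train() are taken from other snippets on this page.

    // Hypothetical wiring; ValidationListener and createTrainer() are assumed names.
    OneStepTrainer<?> trainer = createTrainer();
    trainer.addEventListener(new ValidationListener(inputProvider, 10 /* validationFrequency */, 0.01f /* acceptanceError */));
    trainer.train();  // the listener validates every 10 mini-batches and can stop early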

Examples of com.github.neuralnetworks.architecture.NeuralNetwork

    /**
     * The network is tested via the testing input provider and the output error is aggregated for each example.
     */
    public void test() {
        TrainingInputProvider ip = getTestingInputProvider();
        NeuralNetwork n = getNeuralNetwork();

        if (ip != null && n != null && n.getLayerCalculator() != null) {
            ip.reset();

            triggerEvent(new TestingStartedEvent(this));

            Set<Layer> calculatedLayers = new UniqueList<>();
            ValuesProvider results = new ValuesProvider();
            TrainingInputData input = null;

            if (getOutputError() != null) {
                getOutputError().reset();
            }

            while ((input = ip.getNextInput()) != null) {
                calculatedLayers.clear();
                calculatedLayers.add(n.getInputLayer());
                results.addValues(n.getInputLayer(), input.getInput());
                n.getLayerCalculator().calculate(n, n.getOutputLayer(), calculatedLayers, results);

                if (getOutputError() != null) {
                    getOutputError().addItem(results.getValues(n.getOutputLayer()), input.getTarget());
                }

                triggerEvent(new MiniBatchFinishedEvent(this, input, results, null));
            }
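Once test() returns, the aggregated error is still held by the trainer's OutputError, so a caller can read it back directly (a small sketch; getOutputError() and getTotalNetworkError() appear verbatim in the snippets on this page):

    trainer.test();
    if (trainer.getOutputError() != null) {
        float e = trainer.getOutputError().getTotalNetworkError();
        System.out.println("test error: " + e + " (" + (e * 100) + "%)");
    }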

Examples of com.github.neuralnetworks.architecture.NeuralNetwork

        propagateForward(data.getInput());
        propagateBackward(data.getTarget());
    }

    public void propagateForward(Matrix input) {
        NeuralNetwork nn = getNeuralNetwork();
        Set<Layer> calculatedLayers = new UniqueList<Layer>();
        calculatedLayers.add(nn.getInputLayer());
        activations.addValues(nn.getInputLayer(), input);
        nn.getLayerCalculator().calculate(nn, nn.getOutputLayer(), calculatedLayers, activations);
    }
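The forward pass is always the same three-step recipe: mark the input layer as already calculated, bind the input matrix to it in a ValuesProvider, and let the LayerCalculator propagate to the output layer. Extracted into a standalone helper, a minimal sketch built only from calls shown on this page:

    public static ValuesProvider forward(NeuralNetwork nn, Matrix input) {
        ValuesProvider activations = new ValuesProvider();
        Set<Layer> calculatedLayers = new UniqueList<Layer>();
        calculatedLayers.add(nn.getInputLayer());          // input layer needs no computation
        activations.addValues(nn.getInputLayer(), input);  // bind the data to the input layer
        nn.getLayerCalculator().calculate(nn, nn.getOutputLayer(), calculatedLayers, activations);
        return activations;  // output is at activations.getValues(nn.getOutputLayer())
    }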

Examples of com.github.neuralnetworks.architecture.NeuralNetwork

        activations.addValues(nn.getInputLayer(), input);
        nn.getLayerCalculator().calculate(nn, nn.getOutputLayer(), calculatedLayers, activations);
    }

    public void propagateBackward(Matrix target) {
        NeuralNetwork nn = getNeuralNetwork();

        // Seed the backpropagation provider with the error derivative at the
        // output layer, then propagate it backwards from there.
        OutputErrorDerivative d = getProperties().getParameter(Constants.OUTPUT_ERROR_DERIVATIVE);
        Matrix outputErrorDerivative = d.getOutputErrorDerivative(activations.getValues(nn.getOutputLayer()), target);
        backpropagation.addValues(nn.getOutputLayer(), outputErrorDerivative);
        Set<Layer> calculatedLayers = new UniqueList<Layer>();
        calculatedLayers.add(nn.getOutputLayer());
        BackPropagationLayerCalculator blc = getBPLayerCalculator();
        blc.backpropagate(nn, calculatedLayers, activations, backpropagation);
    }
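The OutputErrorDerivative retrieved from the properties determines the loss being minimized. For plain squared error the derivative at the output layer is simply activation minus target, so a minimal implementation in the two-argument style used above could look like the following sketch (the Matrix copy constructor is an assumption, not confirmed API):

    public class MseDerivative implements OutputErrorDerivative {
        @Override
        public Matrix getOutputErrorDerivative(Matrix activation, Matrix target) {
            Matrix result = new Matrix(activation);  // assumed copy constructor
            for (int i = 0; i < result.getElements().length; i++) {
                result.getElements()[i] = activation.getElements()[i] - target.getElements()[i];
            }
            return result;
        }
    }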

Examples of com.github.neuralnetworks.architecture.NeuralNetwork

      isTraining = false;
  } else if (event instanceof MiniBatchFinishedEvent && isTraining) {
      MiniBatchFinishedEvent mbe = (MiniBatchFinishedEvent) event;
      if (mbe.getBatchCount() % validationFrequency == 0) {
          OneStepTrainer<?> t = (OneStepTrainer<?>) event.getSource();
          NeuralNetwork n = t.getNeuralNetwork();

          if (n.getLayerCalculator() != null) {
              OutputError outputError = t.getOutputError();
              outputError.reset();
              inputProvider.reset();

              // Reuse the mini-batch tensors when available; otherwise preallocate
              // a fresh provider for a batch of one example.
              ValuesProvider vp = mbe.getResults();
              if (vp == null) {
                  vp = TensorFactory.tensorProvider(n, 1, Environment.getInstance().getUseDataSharedMemory());
              }
              if (vp.get(outputError) == null) {
                  vp.add(outputError, vp.get(n.getOutputLayer()).getDimensions());
              }
              TrainingInputData input = new TrainingInputDataImpl(vp.get(n.getInputLayer()), vp.get(outputError));

              Set<Layer> calculatedLayers = new UniqueList<>();
              for (int i = 0; i < inputProvider.getInputSize(); i++) {
                  inputProvider.populateNext(input);
                  calculatedLayers.clear();
                  calculatedLayers.add(n.getInputLayer());

                  n.getLayerCalculator().calculate(n, n.getOutputLayer(), calculatedLayers, vp);

                  outputError.addItem(vp.get(n.getOutputLayer()), input.getTarget());
              }

              float e = outputError.getTotalNetworkError();
              if (e <= acceptanceError) {
                  System.out.println("Stopping at error " + e + " (" + (e * 100) + "%) for " + mbe.getBatchCount() + " minibatches");
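This is a newer variant of the same validation listener: instead of allocating values per example, it preallocates every layer tensor once through TensorFactory and lets the input provider write straight into them. Reduced to a single forward pass, the pattern is (all calls copied from the snippet above; batch size 1 assumed):

    ValuesProvider vp = TensorFactory.tensorProvider(n, 1, Environment.getInstance().getUseDataSharedMemory());
    TrainingInputData input = new TrainingInputDataImpl(vp.get(n.getInputLayer()), vp.get(outputError));
    inputProvider.populateNext(input);  // fills the preallocated input/target tensors
    Set<Layer> calculatedLayers = new UniqueList<>();
    calculatedLayers.add(n.getInputLayer());
    n.getLayerCalculator().calculate(n, n.getOutputLayer(), calculatedLayers, vp);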

Examples of com.github.neuralnetworks.architecture.NeuralNetwork

     * After that the error is backpropagated (via BackPropagationLayerCalculator blc).
     */
    @Override
    protected void learnInput(int batch) {
        // forward pass: input layer -> output layer
        NeuralNetwork nn = getNeuralNetwork();
        Set<Layer> calculatedLayers = new UniqueList<Layer>();
        calculatedLayers.add(nn.getInputLayer());
        nn.getLayerCalculator().calculate(nn, nn.getOutputLayer(), calculatedLayers, activations);

        // backward pass: compute the output error derivative in place, then backpropagate
        OutputErrorDerivative d = getProperties().getParameter(Constants.OUTPUT_ERROR_DERIVATIVE);
        d.getOutputErrorDerivative(activations.get(nn.getOutputLayer()), activations.get(d), backpropagation.get(nn.getOutputLayer()));
        calculatedLayers.clear();
        calculatedLayers.add(nn.getOutputLayer());
        BackPropagationLayerCalculator blc = getBPLayerCalculator();
        blc.backpropagate(nn, calculatedLayers, activations, backpropagation);
    }
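learnInput handles exactly one mini-batch; the loop that feeds it lives in OneStepTrainer.train(). A rough, hypothetical sketch of that driver, inferred from the events the listeners on this page react to (getTrainingInputProvider() is assumed by analogy with getTestingInputProvider(); the MiniBatchFinishedEvent call mirrors the test() snippet above):

    for (int batch = 0; batch < getTrainingInputProvider().getInputSize(); batch++) {
        getTrainingInputProvider().populateNext(input);  // fill the shared input/target tensors
        learnInput(batch);
        triggerEvent(new MiniBatchFinishedEvent(this, input, activations, null));
    }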

Examples of com.github.neuralnetworks.architecture.NeuralNetwork

    public void handleEvent(TrainingEvent event) {
        if (event instanceof TrainingFinishedEvent) {
            float dropoutRate = properties.getParameter(Constants.DROPOUT_RATE);

            if (dropoutRate > 0) {
                NeuralNetwork nn = getNeuralNetwork();

                // After training: switch dropout off for inference and rescale the
                // weights of every hidden fully-connected layer to compensate.
                LayerCalculatorImpl lc = (LayerCalculatorImpl) nn.getLayerCalculator();
                nn.getConnections().stream()
                        .filter(c -> c instanceof FullyConnected && c.getInputLayer() != nn.getInputLayer() && !Util.isBias(c.getInputLayer()))
                        .forEach(c -> {
                            ConnectionCalculatorFullyConnected cc = (ConnectionCalculatorFullyConnected) lc.getConnectionCalculator(c.getOutputLayer());
                            cc.setDropoutRate(0);
                            FullyConnected fc = (FullyConnected) c;
                            fc.getWeights().forEach(i -> fc.getWeights().getElements()[i] = fc.getWeights().getElements()[i] * (1 - dropoutRate));
                        });
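The (1 - dropoutRate) factor preserves the expected pre-activation: with a rate of 0.5 only half of a layer's inputs were active during training, so the weights, now always active, must be halved for inference. A library-free numeric illustration:

    float dropoutRate = 0.5f;
    float[] weights = { 0.8f, -0.2f, 0.4f };
    for (int i = 0; i < weights.length; i++) {
        weights[i] *= (1 - dropoutRate);  // 0.8 -> 0.4, -0.2 -> -0.1, 0.4 -> 0.2
    }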

Examples of com.github.neuralnetworks.architecture.NeuralNetwork

    protected ValuesProvider backpropagation;
    protected TrainingInputData input;

    public BackPropagationTrainer(Properties properties) {
        super(properties);
        NeuralNetwork nn = getNeuralNetwork();
        // Preallocate one tensor per layer for forward activations and for the
        // backpropagated errors, plus a tensor for the target values.
        activations = TensorFactory.tensorProvider(nn, getTrainingBatchSize(), Environment.getInstance().getUseDataSharedMemory());
        activations.add(getProperties().getParameter(Constants.OUTPUT_ERROR_DERIVATIVE), activations.get(getNeuralNetwork().getOutputLayer()).getDimensions());
        backpropagation = TensorFactory.tensorProvider(nn, getTrainingBatchSize(), Environment.getInstance().getUseDataSharedMemory());

        float dropoutRate = properties.getParameter(Constants.DROPOUT_RATE);

        if (dropoutRate > 0) {
            // Enable dropout on every hidden fully-connected layer and register this
            // trainer as a listener so the weights can be rescaled when training ends.
            LayerCalculatorImpl lc = (LayerCalculatorImpl) nn.getLayerCalculator();
            nn.getConnections().stream()
                    .filter(c -> c instanceof FullyConnected && c.getInputLayer() != nn.getInputLayer() && !Util.isBias(c.getInputLayer()))
                    .forEach(c -> {
                        ConnectionCalculatorFullyConnected cc = (ConnectionCalculatorFullyConnected) lc.getConnectionCalculator(c.getOutputLayer());
                        cc.setDropoutRate(dropoutRate);
                    });

            addEventListener(this);
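Construction therefore needs a Properties map carrying at least the output error derivative and, optionally, a dropout rate. A hypothetical setup sketch (setParameter is assumed to mirror the getParameter calls above, and MseDerivative refers to the earlier sketch on this page):

    Properties p = new Properties();
    p.setParameter(Constants.OUTPUT_ERROR_DERIVATIVE, new MseDerivative());  // assumed setter
    p.setParameter(Constants.DROPOUT_RATE, 0.5f);
    BackPropagationTrainer<?> trainer = new BackPropagationTrainer<>(p);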

Examples of com.wiieditor.neuralnetwork.NeuralNetwork

        downSample();
        double[] output = DataInterpolator.convertCharToNeuronDoubleArr(c);
        NeuralData neuralData = gridPanel.getData().getConvertedNeuralData();
        neuralData.setOutputVector(output);

        NeuralNetwork n = NeuralNetwork.createNeuralNetwork();
        n.learn(neuralData);
        n.save();

        log.info("Entry: NeuralNetwork learnt for " + c);
    }

Examples of com.wiieditor.neuralnetwork.NeuralNetwork

    }

    public char recognize() {
        downSample();
        NeuralData neuralData = gridPanel.getData().getConvertedNeuralData();
        NeuralNetwork n = NeuralNetwork.createNeuralNetwork();
        neuralData = n.calculateAndGetNormalizedOuput(neuralData);

        char c = DataInterpolator.convertNeuronDoubleArrToChar(neuralData.getOutputVector());
        log.info("Entry: NeuralNetwork recognized " + c);
        return c;
    }
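Together with the previous snippet this completes a learn-then-recognize round trip. A sketch using only calls shown on this page, with 'A' as the example label and gridPanel assumed to hold the drawn glyph:

    // teach the current drawing as 'A'
    NeuralData sample = gridPanel.getData().getConvertedNeuralData();
    sample.setOutputVector(DataInterpolator.convertCharToNeuronDoubleArr('A'));
    NeuralNetwork n = NeuralNetwork.createNeuralNetwork();
    n.learn(sample);
    n.save();

    // read it back (the "Ouput" spelling is the API's own)
    char recognized = DataInterpolator.convertNeuronDoubleArrToChar(
            n.calculateAndGetNormalizedOuput(sample).getOutputVector());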