Package: com.github.neuralnetworks.calculation

Examples of com.github.neuralnetworks.calculation.OutputError


      StringBuilder sb = new StringBuilder();
      sb.append(((finishTime - startTime) / 1000f) + " s  total time" + s);
      sb.append((miniBatchTotalTime / (miniBatches * 1000f)) + " s  per minibatch of " + miniBatches + " mini batches" + s);
      if (event instanceof TestingFinishedEvent) {
    Trainer<?> t = (Trainer<?>) event.getSource();
    OutputError oe = t.getOutputError();
    sb.append(oe.getTotalErrorSamples() + "/" + oe.getTotalInputSize() + " samples (" + oe.getTotalNetworkError() + ", " + (oe.getTotalNetworkError() * 100) + "%) error" + s + s);
      }

      System.out.print(sb.toString());
  } else if (event instanceof MiniBatchFinishedEvent) {
      miniBatches++;
View Full Code Here


    NeuralNetwork n = t.getNeuralNetwork();

    if (n.getLayerCalculator() != null) {
        Set<Layer> calculatedLayers = new UniqueList<>();
        TrainingInputData input = null;
        OutputError outputError = t.getOutputError();
        outputError.reset();
        inputProvider.reset();

        while ((input = inputProvider.getNextInput()) != null) {
      calculatedLayers.clear();
      calculatedLayers.add(n.getInputLayer());
      ValuesProvider vp = mbe.getResults();
      if (vp == null) {
          vp = new ValuesProvider();
      }

      vp.addValues(n.getInputLayer(), input.getInput());
      n.getLayerCalculator().calculate(n, n.getOutputLayer(), calculatedLayers, vp);

      outputError.addItem(vp.getValues(n.getOutputLayer()), input.getTarget());
        }

        float e = outputError.getTotalNetworkError();
        if (e <= acceptanceError) {
      System.out.println("Stopping at error " + e + " (" + (e * 100) + "%) for " + mbe.getBatchCount() + " minibatches");
      t.stopTraining();
        }
    }
View Full Code Here

  NeuralNetworkImpl mlp = NNFactory.mlpSigmoid(new int[] { 4, 2, 3 }, true);

  // training and testing data providers
  IrisInputProvider trainInputProvider = new IrisInputProvider(150, 300000, new IrisTargetMultiNeuronOutputConverter(), false, true, false);
  IrisInputProvider testInputProvider = new IrisInputProvider(1, 150, new IrisTargetMultiNeuronOutputConverter(), false, true, false);
  OutputError outputError = new MultipleNeuronsOutputError();

  // trainer
  BackPropagationTrainer<?> bpt = TrainerFactory.backPropagation(mlp, trainInputProvider, testInputProvider, outputError, new NNRandomInitializer(new MersenneTwisterRandomInitializer(-0.01f, 0.01f), 0.5f), 0.02f, 0.7f, 0f, 0f);

  // log data
View Full Code Here

  // training and testing data providers
  IrisInputProvider trainInputProvider = new IrisInputProvider(new IrisTargetMultiNeuronOutputConverter(), false);
  trainInputProvider.addInputModifier(new ScalingInputFunction(trainInputProvider));
  IrisInputProvider testInputProvider = new IrisInputProvider(new IrisTargetMultiNeuronOutputConverter(), false);
  testInputProvider.addInputModifier(new ScalingInputFunction(testInputProvider));
  OutputError outputError = new MultipleNeuronsOutputError();

  // trainer
  BackPropagationTrainer<?> bpt = TrainerFactory.backPropagation(mlp, trainInputProvider, testInputProvider, outputError, new NNRandomInitializer(new MersenneTwisterRandomInitializer(-0.01f, 0.01f), 0.5f), 0.02f, 0.7f, 0f, 0f, 0f, 150, 1, 2000);

  // log data
View Full Code Here

  trainInputProvider.addInputModifier(new ScalingInputFunction(trainInputProvider));

  TrainingInputProviderImpl testInputProvider = new CSVInputProvider(new File(inputPath), new File(targetPath));
  testInputProvider.addInputModifier(new ScalingInputFunction(testInputProvider));

  OutputError outputError = new MultipleNeuronsOutputError();

  // trainer
  BackPropagationTrainer<?> bpt = TrainerFactory.backPropagation(mlp, trainInputProvider, testInputProvider, outputError, new NNRandomInitializer(new MersenneTwisterRandomInitializer(-0.01f, 0.01f), 0.5f), 0.02f, 0.7f, 0f, 0f, 0f, 150, 1, 2000);

  // log data
View Full Code Here

      triggerEvent(new TestingStartedEvent(this));

      Set<Layer> calculatedLayers = new UniqueList<>();
      ValuesProvider results = TensorFactory.tensorProvider(n, getTestBatchSize(), Environment.getInstance().getUseDataSharedMemory());

      OutputError oe = getOutputError();
      if (oe != null) {
    oe.reset();
    results.add(oe, results.get(n.getOutputLayer()).getDimensions());
      }

      TrainingInputData input = new TrainingInputDataImpl(results.get(n.getInputLayer()), results.get(oe));
      for (int i = 0; i < ip.getInputSize(); i += getTestBatchSize()) {
    ip.populateNext(input);
    calculatedLayers.clear();
    calculatedLayers.add(n.getInputLayer());
    n.getLayerCalculator().calculate(n, n.getOutputLayer(), calculatedLayers, results);
   
    if (oe != null) {
        oe.addItem(results.get(n.getOutputLayer()), input.getTarget());
    }
   
    triggerEvent(new MiniBatchFinishedEvent(this, input, results, null));
      }
     
View Full Code Here

      StringBuilder sb = new StringBuilder();
      sb.append(((finishTime - startTime) / 1000f) + " s  total time" + s);
      sb.append((miniBatchTotalTime / (miniBatches * 1000f)) + " s  per minibatch of " + miniBatches + " batches" + s);
      if (event instanceof TestingFinishedEvent) {
    Trainer<?> t = (Trainer<?>) event.getSource();
    OutputError oe = t.getOutputError();
    sb.append(oe.getTotalErrorSamples() + "/" + oe.getTotalInputSize() + " samples (" + oe.getTotalNetworkError() + ", " + (oe.getTotalNetworkError() * 100) + "%) error" + s + s);
      }

      System.out.print(sb.toString());
  } else if (event instanceof MiniBatchFinishedEvent) {
      miniBatches++;
View Full Code Here

      if (mbe.getBatchCount() % validationFrequency == 0) {
    OneStepTrainer<?> t = (OneStepTrainer<?>) event.getSource();
    NeuralNetwork n = t.getNeuralNetwork();

    if (n.getLayerCalculator() != null) {
        OutputError outputError = t.getOutputError();
        outputError.reset();
        inputProvider.reset();

        ValuesProvider vp = mbe.getResults();
        if (vp == null) {
      vp = TensorFactory.tensorProvider(n, 1, Environment.getInstance().getUseDataSharedMemory());
        }
        if (vp.get(outputError) == null) {
      vp.add(outputError, vp.get(n.getOutputLayer()).getDimensions());
        }
        TrainingInputData input = new TrainingInputDataImpl(vp.get(n.getInputLayer()), vp.get(outputError));

        Set<Layer> calculatedLayers = new UniqueList<>();
        for (int i = 0; i < inputProvider.getInputSize(); i++) {
      inputProvider.populateNext(input);
      calculatedLayers.clear();
      calculatedLayers.add(n.getInputLayer());

      n.getLayerCalculator().calculate(n, n.getOutputLayer(), calculatedLayers, vp);

      outputError.addItem(vp.get(n.getOutputLayer()), input.getTarget());
        }

        float e = outputError.getTotalNetworkError();
        if (e <= acceptanceError) {
      System.out.println("Stopping at error " + e + " (" + (e * 100) + "%) for " + mbe.getBatchCount() + " minibatches");
      t.stopTraining();
        }
    }
View Full Code Here

TOP

Related Classes of com.github.neuralnetworks.calculation.OutputError

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.