Package: com.github.neuralnetworks.util

Examples of com.github.neuralnetworks.util.Properties


  return new BackPropagationAutoencoder(p);
    }

    protected static Properties backpropProperties(NeuralNetworkImpl nn, TrainingInputProvider trainingSet, TrainingInputProvider testingSet, OutputError error, NNRandomInitializer rand, float learningRate, float momentum, float l1weightDecay, float l2weightDecay, float dropoutRate, int trainingBatchSize, int testBatchSize, int epochs) {
  Properties p = new Properties();
  p.setParameter(Constants.NEURAL_NETWORK, nn);
  p.setParameter(Constants.TRAINING_INPUT_PROVIDER, trainingSet);
  p.setParameter(Constants.TESTING_INPUT_PROVIDER, testingSet);
  p.setParameter(Constants.LEARNING_RATE, learningRate);
  p.setParameter(Constants.MOMENTUM, momentum);
  p.setParameter(Constants.L1_WEIGHT_DECAY, l1weightDecay);
  p.setParameter(Constants.L2_WEIGHT_DECAY, l2weightDecay);
  p.setParameter(Constants.OUTPUT_ERROR_DERIVATIVE, new MSEDerivative());
  p.setParameter(Constants.WEIGHT_UDPATES, weightUpdates(nn));
  p.setParameter(Constants.OUTPUT_ERROR, error);
  p.setParameter(Constants.RANDOM_INITIALIZER, rand);
  p.setParameter(Constants.TRAINING_BATCH_SIZE, trainingBatchSize);
  p.setParameter(Constants.TEST_BATCH_SIZE, testBatchSize);
  p.setParameter(Constants.EPOCHS, epochs);
  p.setParameter(Constants.DROPOUT_RATE, dropoutRate);

  return p;
    }
View Full Code Here


  RBMLayerCalculator lc = NNFactory.rbmSigmoidSigmoid(rbm, trainingBatchSize);
  return new AparapiCDTrainer(rbmProperties(rbm, lc, trainingSet, testingSet, error, rand, learningRate, momentum, l1weightDecay, l2weightDecay, gibbsSampling, trainingBatchSize, epochs, isPersistentCD));
    }

    protected static Properties rbmProperties(RBM rbm, RBMLayerCalculator lc, TrainingInputProvider trainingSet, TrainingInputProvider testingSet, OutputError error, NNRandomInitializer rand, float learningRate, float momentum, float l1weightDecay, float l2weightDecay, int gibbsSampling, int trainingBatchSize, int epochs, boolean isPersistentCD) {
  Properties p = new Properties();
  p.setParameter(Constants.NEURAL_NETWORK, rbm);
  p.setParameter(Constants.TRAINING_INPUT_PROVIDER, trainingSet);
  p.setParameter(Constants.TESTING_INPUT_PROVIDER, testingSet);
  p.setParameter(Constants.LEARNING_RATE, learningRate);
  p.setParameter(Constants.MOMENTUM, momentum);
  p.setParameter(Constants.L1_WEIGHT_DECAY, l1weightDecay);
  p.setParameter(Constants.L2_WEIGHT_DECAY, l2weightDecay);
  p.setParameter(Constants.GIBBS_SAMPLING_COUNT, gibbsSampling);
  p.setParameter(Constants.OUTPUT_ERROR, error);
  p.setParameter(Constants.RANDOM_INITIALIZER, rand);
  p.setParameter(Constants.PERSISTENT_CD, isPersistentCD);
  p.setParameter(Constants.LAYER_CALCULATOR, lc);
  p.setParameter(Constants.TRAINING_BATCH_SIZE, trainingBatchSize);
  p.setParameter(Constants.EPOCHS, epochs);

  return p;
    }
View Full Code Here

  return result;
    }

    protected static Properties layerTrainerProperties(DNN<?> dnn, Map<NeuralNetwork, OneStepTrainer<?>> layerTrainers, TrainingInputProvider trainingSet, TrainingInputProvider testingSet, OutputError error) {
  Properties p = new Properties();
  p.setParameter(Constants.NEURAL_NETWORK, dnn);
  p.setParameter(Constants.TRAINING_INPUT_PROVIDER, trainingSet);
  p.setParameter(Constants.TESTING_INPUT_PROVIDER, testingSet);
  p.setParameter(Constants.OUTPUT_ERROR, error);
  p.setParameter(Constants.LAYER_TRAINERS, layerTrainers);

  return p;
    }
View Full Code Here

TOP

Related Classes of com.github.neuralnetworks.util.Properties

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and owned by Oracle Inc. Contact: software@gmail.com.