Package org.apache.hama.ml.math

Examples of org.apache.hama.ml.math.DenseDoubleVector
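Before the excerpts, here is a minimal self-contained sketch of the DenseDoubleVector calls the snippets below rely on: the double[] constructor, get, toArray, and slice. The class name VectorBasics is illustrative only, not part of Hama.

import org.apache.hama.ml.math.DenseDoubleVector;
import org.apache.hama.ml.math.DoubleVector;

public class VectorBasics {
  public static void main(String[] args) {
    // wrap a primitive double array in a dense vector
    DenseDoubleVector v = new DenseDoubleVector(new double[] { 1, 2, 3 });

    double first = v.get(0);    // 1.0
    double[] all = v.toArray(); // copies the components back out

    // slice(2) keeps the first two components; the tests below use it
    // to split the two XOR inputs from the trailing label
    DoubleVector features = v.slice(2);

    System.out.println(first + ", " + all.length + ", "
        + features.toArray().length); // 1.0, 3, 2
  }
}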


   * The model meta-data is stored in memory.
   */
  public DoubleVector output(DoubleVector featureVector) {
    List<double[]> outputCache = this.outputInternal(featureVector);
    // the output of the last layer is the output of the MLP
    return new DenseDoubleVector(outputCache.get(outputCache.size() - 1));
  }
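outputInternal performs the forward pass and caches each layer's activations in order, so the last list entry holds the output layer's activations; output() simply wraps that final array in a DenseDoubleVector.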


    double[] trainingFeature = Arrays.copyOfRange(trainingVec, 0,
        this.layerSizeArray[0]);
    double[] trainingLabels = Arrays.copyOfRange(trainingVec,
        this.layerSizeArray[0], trainingVec.length);

    DoubleVector trainingFeatureVec = new DenseDoubleVector(trainingFeature);
    List<double[]> outputCache = this.outputInternal(trainingFeatureVec);

    // calculate the delta of output layer
    double[] delta = new double[this.layerSizeArray[this.layerSizeArray.length - 1]];
    double[] outputLayerOutput = outputCache.get(outputCache.size() - 1);
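The split above relies on the layout of a training row: the first layerSizeArray[0] entries are the features and the remainder are the labels. A standalone sketch of the same java.util.Arrays.copyOfRange split (the values and the SplitDemo class are illustrative):

import java.util.Arrays;

public class SplitDemo {
  public static void main(String[] args) {
    // one XOR row laid out as { feature, feature, label }
    double[] trainingVec = { 1, 0, 1 };
    int inputLayerSize = 2; // plays the role of layerSizeArray[0]

    double[] features = Arrays.copyOfRange(trainingVec, 0, inputLayerSize);
    double[] labels = Arrays.copyOfRange(trainingVec, inputLayerSize,
        trainingVec.length);

    // prints: [1.0, 0.0] -> [1.0]
    System.out.println(Arrays.toString(features) + " -> "
        + Arrays.toString(labels));
  }
}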

      e.printStackTrace();
    }

    // initialize the MLP with the existing model meta-data and get the output
    MultiLayerPerceptron mlp = new SmallMultiLayerPerceptron(modelPath);
    DoubleVector input = new DenseDoubleVector(new double[] { 1, 2, 3 });
    try {
      DoubleVector result = mlp.output(input);
      assertArrayEquals(new double[] { 0.6636557, 0.7009963, 0.7213835 },
          result.toArray(), 0.0001);
    } catch (Exception e1) {
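The four training tests that follow share one data set: each row encodes a line of the XOR truth table, the first two components being the inputs and the third the expected output. slice(2) strips that label so only the features reach the trained network, and the assertions allow a tolerance of 0.2 on the predicted value.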

   */
  @Test
  public void testTrainWithSquaredError() {
    // generate training data
    DoubleVector[] trainingData = new DenseDoubleVector[] {
        new DenseDoubleVector(new double[] { 0, 0, 0 }),
        new DenseDoubleVector(new double[] { 0, 1, 1 }),
        new DenseDoubleVector(new double[] { 1, 0, 1 }),
        new DenseDoubleVector(new double[] { 1, 1, 0 }) };

    // set parameters
    double learningRate = 0.5;
    double regularization = 0.02; // regularization should be a tiny number
    double momentum = 0; // no momentum
    String squashingFunctionName = "Sigmoid";
    String costFunctionName = "SquaredError";
    int[] layerSizeArray = new int[] { 2, 5, 1 };
    SmallMultiLayerPerceptron mlp = new SmallMultiLayerPerceptron(learningRate,
        regularization, momentum, squashingFunctionName, costFunctionName,
        layerSizeArray);

    try {
      // train by multiple instances
      Random rnd = new Random();
      for (int i = 0; i < 30000; ++i) {
        DenseDoubleMatrix[] weightUpdates = mlp
            .trainByInstance(trainingData[rnd.nextInt(4)]);
        mlp.updateWeightMatrices(weightUpdates);
      }

      // System.out.printf("Weight matrices: %s\n",
      // mlp.weightsToString(mlp.getWeightMatrices()));
      for (int i = 0; i < trainingData.length; ++i) {
        DenseDoubleVector testVec = (DenseDoubleVector) trainingData[i]
            .slice(2);
        assertEquals(trainingData[i].toArray()[2], mlp.output(testVec)
            .toArray()[0], 0.2);
      }
    } catch (Exception e) {

   */
  @Test
  public void testTrainWithCrossEntropy() {
    // generate training data
    DoubleVector[] trainingData = new DenseDoubleVector[] {
        new DenseDoubleVector(new double[] { 0, 0, 0 }),
        new DenseDoubleVector(new double[] { 0, 1, 1 }),
        new DenseDoubleVector(new double[] { 1, 0, 1 }),
        new DenseDoubleVector(new double[] { 1, 1, 0 }) };

    // set parameters
    double learningRate = 0.5;
    double regularization = 0.0; // no regularization
    double momentum = 0; // no momentum
    String squashingFunctionName = "Sigmoid";
    String costFunctionName = "CrossEntropy";
    int[] layerSizeArray = new int[] { 2, 7, 1 };
    SmallMultiLayerPerceptron mlp = new SmallMultiLayerPerceptron(learningRate,
        regularization, momentum, squashingFunctionName, costFunctionName,
        layerSizeArray);

    try {
      // train by multiple instances
      Random rnd = new Random();
      for (int i = 0; i < 20000; ++i) {
        DenseDoubleMatrix[] weightUpdates = mlp
            .trainByInstance(trainingData[rnd.nextInt(4)]);
        mlp.updateWeightMatrices(weightUpdates);
      }

      // System.out.printf("Weight matrices: %s\n",
      // mlp.weightsToString(mlp.getWeightMatrices()));
      for (int i = 0; i < trainingData.length; ++i) {
        DenseDoubleVector testVec = (DenseDoubleVector) trainingData[i]
            .slice(2);
        assertEquals(trainingData[i].toArray()[2], mlp.output(testVec)
            .toArray()[0], 0.2);
      }
    } catch (Exception e) {

   */
  @Test
  public void testWithRegularization() {
    // generate training data
    DoubleVector[] trainingData = new DenseDoubleVector[] {
        new DenseDoubleVector(new double[] { 0, 0, 0 }),
        new DenseDoubleVector(new double[] { 0, 1, 1 }),
        new DenseDoubleVector(new double[] { 1, 0, 1 }),
        new DenseDoubleVector(new double[] { 1, 1, 0 }) };

    // set parameters
    double learningRate = 0.5;
    double regularization = 0.02; // regularization should be a tiny number
    double momentum = 0; // no momentum
    String squashingFunctionName = "Sigmoid";
    String costFunctionName = "CrossEntropy";
    int[] layerSizeArray = new int[] { 2, 7, 1 };
    SmallMultiLayerPerceptron mlp = new SmallMultiLayerPerceptron(learningRate,
        regularization, momentum, squashingFunctionName, costFunctionName,
        layerSizeArray);

    try {
      // train by multiple instances
      Random rnd = new Random();
      for (int i = 0; i < 10000; ++i) {
        DenseDoubleMatrix[] weightUpdates = mlp
            .trainByInstance(trainingData[rnd.nextInt(4)]);
        mlp.updateWeightMatrices(weightUpdates);
      }

      // System.out.printf("Weight matrices: %s\n",
      // mlp.weightsToString(mlp.getWeightMatrices()));
      for (int i = 0; i < trainingData.length; ++i) {
        DenseDoubleVector testVec = (DenseDoubleVector) trainingData[i]
            .slice(2);
        assertEquals(trainingData[i].toArray()[2], mlp.output(testVec)
            .toArray()[0], 0.2);
      }
    } catch (Exception e) {

   */
  @Test
  public void testWithMomentum() {
    // generate training data
    DoubleVector[] trainingData = new DenseDoubleVector[] {
        new DenseDoubleVector(new double[] { 0, 0, 0 }),
        new DenseDoubleVector(new double[] { 0, 1, 1 }),
        new DenseDoubleVector(new double[] { 1, 0, 1 }),
        new DenseDoubleVector(new double[] { 1, 1, 0 }) };

    // set parameters
    double learningRate = 0.5;
    double regularization = 0.02; // regularization should be a tiny number
    double momentum = 0.5; // enable momentum
    String squashingFunctionName = "Sigmoid";
    String costFunctionName = "CrossEntropy";
    int[] layerSizeArray = new int[] { 2, 7, 1 };
    SmallMultiLayerPerceptron mlp = new SmallMultiLayerPerceptron(learningRate,
        regularization, momentum, squashingFunctionName, costFunctionName,
        layerSizeArray);

    try {
      // train by multiple instances
      Random rnd = new Random();
      for (int i = 0; i < 3000; ++i) {
        DenseDoubleMatrix[] weightUpdates = mlp
            .trainByInstance(trainingData[rnd.nextInt(4)]);
        mlp.updateWeightMatrices(weightUpdates);
      }

      // System.out.printf("Weight matrices: %s\n",
      // mlp.weightsToString(mlp.getWeightMatrices()));
      for (int i = 0; i < trainingData.length; ++i) {
        DenseDoubleVector testVec = (DenseDoubleVector) trainingData[i]
            .slice(2);
        assertEquals(trainingData[i].toArray()[2], mlp.output(testVec)
            .toArray()[0], 0.2);
      }
    } catch (Exception e) {
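Note how the iteration counts fall across these tests: 30000 for squared error, 20000 for cross entropy, 10000 with regularization, and only 3000 once momentum is enabled, consistent with momentum accelerating convergence on this XOR task.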

    String strDataPath = "/tmp/xor-training-by-xor";
    Path dataPath = new Path(strDataPath);

    // generate training data
    DoubleVector[] trainingData = new DenseDoubleVector[] {
        new DenseDoubleVector(new double[] { 0, 0, 0 }),
        new DenseDoubleVector(new double[] { 0, 1, 1 }),
        new DenseDoubleVector(new double[] { 1, 0, 1 }),
        new DenseDoubleVector(new double[] { 1, 1, 0 }) };

    try {
      URI uri = new URI(strDataPath);
      FileSystem fs = FileSystem.get(uri, conf);
      fs.delete(dataPath, true);
      if (!fs.exists(dataPath)) {
        fs.createNewFile(dataPath);
        SequenceFile.Writer writer = new SequenceFile.Writer(fs, conf,
            dataPath, LongWritable.class, VectorWritable.class);

        for (int i = 0; i < 1000; ++i) {
          VectorWritable vecWritable = new VectorWritable(trainingData[i % 4]);
          writer.append(new LongWritable(i), vecWritable);
        }
        writer.close();
      }

    } catch (Exception e) {
      e.printStackTrace();
    }

    // begin training
    String modelPath = "/tmp/xorModel-training-by-xor.data";
    double learningRate = 0.6;
    double regularization = 0.02; // regularization should be a tiny number
    double momentum = 0.3; // enable momentum
    String squashingFunctionName = "Tanh";
    String costFunctionName = "SquaredError";
    int[] layerSizeArray = new int[] { 2, 5, 1 };
    SmallMultiLayerPerceptron mlp = new SmallMultiLayerPerceptron(learningRate,
        regularization, momentum, squashingFunctionName, costFunctionName,
        layerSizeArray);

    Map<String, String> trainingParams = new HashMap<String, String>();
    trainingParams.put("training.iteration", "1000"); // number of training iterations
    trainingParams.put("training.mode", "minibatch.gradient.descent");
    trainingParams.put("training.batch.size", "100"); // instances per minibatch
    trainingParams.put("tasks", "3"); // number of parallel tasks
    trainingParams.put("modelPath", modelPath); // where the trained model is written

    try {
      mlp.train(dataPath, trainingParams);
    } catch (Exception e) {
      e.printStackTrace();
    }

    // test the model
    for (int i = 0; i < trainingData.length; ++i) {
      DenseDoubleVector testVec = (DenseDoubleVector) trainingData[i].slice(2);
      try {
        DenseDoubleVector actual = (DenseDoubleVector) mlp.output(testVec);
        assertEquals(trainingData[i].toArray()[2], actual.get(0), 0.2);
      } catch (Exception e) {
        e.printStackTrace();
      }
    }
  }
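Unlike the in-memory trainByInstance loops above, this last test serializes 1000 XOR rows into a SequenceFile and delegates to mlp.train(dataPath, trainingParams), which trains with minibatch gradient descent under the configured parameters before the model is queried.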

public class LinearRegressionModelTest {

  @Test
  public void testCorrectCostCalculation() throws Exception {
    LinearRegressionModel linearRegressionModel = new LinearRegressionModel();
    DoubleVector x = new DenseDoubleVector(new double[]{2, 3, 4});
    double y = 1;
    DoubleVector theta = new DenseDoubleVector(new double[]{1, 1, 1});
    Double cost = linearRegressionModel.calculateCostForItem(x, y, 2, theta);
    assertEquals("wrong cost calculation for linear regression", Double.valueOf(16d), cost);
  }
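The asserted value is consistent with a squared-error per-item cost of (h - y)^2 / (2m): the hypothesis is the dot product θ·x = 1*2 + 1*3 + 1*4 = 9, so with y = 1 and m = 2 the cost is (9 - 1)^2 / (2*2) = 16.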

  }

  @Test
  public void testCorrectHypothesisCalculation() throws Exception {
    LinearRegressionModel linearRegressionModel = new LinearRegressionModel();
    Double hypothesisValue = linearRegressionModel.applyHypothesis(new DenseDoubleVector(new double[]{1, 1, 1}),
            new DenseDoubleVector(new double[]{2, 3, 4}));
    assertEquals("wrong hypothesis value for linear regression", Double.valueOf(9), hypothesisValue);
  }
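Here the arithmetic is direct: the hypothesis is the dot product θ·x = 1*2 + 1*3 + 1*4 = 9.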
