Package org.apache.hama.ml.math

Examples of org.apache.hama.ml.math.DoubleVector
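
The snippets below are drawn from the Apache Hama ML code base. As a quick orientation, here is a minimal, self-contained sketch of the DenseDoubleVector operations those snippets rely on; it uses only methods that actually appear further down (construction, set/get, add, subtract, abs, sum, divide, toArray, getLength).

import org.apache.hama.ml.math.DenseDoubleVector;
import org.apache.hama.ml.math.DoubleVector;

public class DoubleVectorSketch {
  public static void main(String[] args) {
    // construct from an existing array, or by length and fill element-wise
    DoubleVector a = new DenseDoubleVector(new double[] { 2, 3, 4 });
    DoubleVector b = new DenseDoubleVector(3);
    for (int i = 0; i < b.getLength(); i++) {
      b.set(i, 1.0);
    }

    // arithmetic methods return new vectors
    DoubleVector sum = a.add(b);         // {3, 4, 5}
    DoubleVector diff = a.subtract(b);   // {1, 2, 3}
    DoubleVector scaled = sum.divide(2); // {1.5, 2, 2.5}

    // reductions and conversion back to a plain array
    double total = diff.abs().sum();     // 6.0
    double[] raw = scaled.toArray();
    System.out.println(total + " / " + raw.length);
  }
}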


    double[] trainingFeature = Arrays.copyOfRange(trainingVec, 0,
        this.layerSizeArray[0]);
    double[] trainingLabels = Arrays.copyOfRange(trainingVec,
        this.layerSizeArray[0], trainingVec.length);

    DoubleVector trainingFeatureVec = new DenseDoubleVector(trainingFeature);
    List<double[]> outputCache = this.outputInternal(trainingFeatureVec);

    // calculate the delta of the output layer
    double[] delta = new double[this.layerSizeArray[this.layerSizeArray.length - 1]];
    double[] outputLayerOutput = outputCache.get(outputCache.size() - 1);
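The excerpt stops before the delta values are filled in. As a hedged illustration only (not necessarily the exact formula used in Hama's implementation), an output layer with a sigmoid squashing function and a squared-error cost would typically compute the delta like this, using the variables already defined above:

    // illustrative only: assumes a sigmoid output layer and squared-error cost
    for (int j = 0; j < delta.length; j++) {
      double o = outputLayerOutput[j];
      double t = trainingLabels[j];
      delta[j] = (o - t) * o * (1 - o); // (output - target) * sigmoid'(net)
    }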


      e.printStackTrace();
    }

    // initialize the MLP with the existing model meta-data and get the output
    MultiLayerPerceptron mlp = new SmallMultiLayerPerceptron(modelPath);
    DoubleVector input = new DenseDoubleVector(new double[] { 1, 2, 3 });
    try {
      DoubleVector result = mlp.output(input);
      assertArrayEquals(new double[] { 0.6636557, 0.7009963, 0.7213835 },
          result.toArray(), 0.0001);
    } catch (Exception e1) {
      e1.printStackTrace();
    }

    // delete meta-data

public class LinearRegressionModelTest {

  @Test
  public void testCorrectCostCalculation() throws Exception {
    LinearRegressionModel linearRegressionModel = new LinearRegressionModel();
    DoubleVector x = new DenseDoubleVector(new double[]{2, 3, 4});
    double y = 1;
    DoubleVector theta = new DenseDoubleVector(new double[]{1, 1, 1});
    Double cost = linearRegressionModel.calculateCostForItem(x, y, 2, theta);
    assertEquals("wrong cost calculation for linear regression", Double.valueOf(16d), cost);
  }
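Assuming calculateCostForItem implements the usual per-item squared-error cost (theta^T x - y)^2 / (2m), with the third argument taken as m, the asserted value can be checked by hand: theta^T x = 1*2 + 1*3 + 1*4 = 9, so with y = 1 and m = 2 the cost is (9 - 1)^2 / (2*2) = 64 / 4 = 16.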

    }
  }

  public static DoubleVector readVector(DataInput in) throws IOException {
    int length = in.readInt();
    DoubleVector vector;
    vector = new DenseDoubleVector(length);
    for (int i = 0; i < length; i++) {
      vector.set(i, in.readDouble());
    }
    return vector;
  }
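A counterpart writer for the same layout (an int length followed by one double per element) would look roughly as follows; the name writeVector and its placement are assumptions for illustration, not necessarily what Hama uses.

  // hypothetical counterpart to readVector above: writes the same layout,
  // an int length followed by one double per element
  public static void writeVector(DoubleVector vector, DataOutput out)
      throws IOException {
    out.writeInt(vector.getLength());
    for (int i = 0; i < vector.getLength(); i++) {
      out.writeDouble(vector.get(i));
    }
  }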

  public static int compareVector(VectorWritable a, VectorWritable o) {
    return compareVector(a.getVector(), o.getVector());
  }

  public static int compareVector(DoubleVector a, DoubleVector o) {
    DoubleVector subtract = a.subtract(o);
    return (int) subtract.sum();
  }
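Note that compareVector sums the signed element-wise differences rather than their absolute values, so two distinct vectors whose differences cancel out, for example {1, 2} and {2, 1}, compare as equal (result 0); the cast to int also truncates any fractional part of the sum. Callers that need a total ordering or an exact equality test should keep both properties in mind.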

public class LogisticRegressionModelTest {

  @Test
  public void testCorrectCostCalculation() throws Exception {
    LogisticRegressionModel logisticRegressionModel = new LogisticRegressionModel();
    DoubleVector x = new DenseDoubleVector(new double[]{2, 3, 4});
    double y = 1;
    DoubleVector theta = new DenseDoubleVector(new double[]{1, 1, 1});
    Double cost = logisticRegressionModel.calculateCostForItem(x, y, 2, theta);
    assertEquals("wrong cost calculation for logistic regression", Double.valueOf(6.170109486162941E-5), cost);
  }
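The expected value here can likewise be reproduced by hand, assuming the standard per-item cross-entropy cost -(1/m) * (y * ln h(x) + (1 - y) * ln(1 - h(x))) with a sigmoid hypothesis h(x) = 1 / (1 + e^(-theta^T x)) and the third argument again taken as m: theta^T x = 9 gives h(x) ≈ 0.9998766, so with y = 1 and m = 2 the cost is -ln(0.9998766) / 2 ≈ 6.1701e-5, matching the asserted 6.170109486162941E-5.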

    // read the input items one at a time
    KeyValuePair<VectorWritable, DoubleWritable> kvp;
    while ((kvp = peer.readNext()) != null) {
      // calculate cost for given input
      double y = kvp.getValue().get();
      DoubleVector x = kvp.getKey().getVector();
      double costForX = regressionModel.calculateCostForItem(x, y, m, theta);

      // add to this peer's local cost
      localCost += costForX;
    }
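Note that localCost is only this peer's partial sum over its share of the input; in a BSP job the per-peer partial costs are typically exchanged and summed in a later superstep before the global cost for the iteration is known (that aggregation step is outside this excerpt).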

      BSPPeer<VectorWritable, DoubleWritable, VectorWritable, DoubleWritable, VectorWritable> peer)
      throws IOException {
    KeyValuePair<VectorWritable, DoubleWritable> kvp;
    double[] thetaDelta = new double[theta.getLength()];
    while ((kvp = peer.readNext()) != null) {
      DoubleVector x = kvp.getKey().getVector();
      double y = kvp.getValue().get();
      double difference = regressionModel.applyHypothesis(theta, x) - y;
      for (int j = 0; j < theta.getLength(); j++) {
        thetaDelta[j] += difference * x.get(j);
      }
    }
    return thetaDelta;
  }
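The returned thetaDelta array holds the accumulated, unscaled gradient of the squared-error cost. A hedged sketch of how it would typically be applied in a gradient-descent step follows; alpha (the learning rate) and m (the number of items) are hypothetical names here, and this is not necessarily the exact update Hama performs.

    // illustrative gradient-descent step: theta_j := theta_j - alpha * thetaDelta_j / m
    double[] updated = new double[theta.getLength()];
    for (int j = 0; j < theta.getLength(); j++) {
      updated[j] = theta.get(j) - alpha * thetaDelta[j] / m;
    }
    theta = new DenseDoubleVector(updated);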

    try {
      reader = new SequenceFile.Reader(fs, centroids, peer.getConfiguration());
      VectorWritable key = new VectorWritable();
      NullWritable value = NullWritable.get();
      while (reader.next(key, value)) {
        DoubleVector center = key.getVector();
        centers.add(center);
      }
    } catch (IOException e) {
      throw new RuntimeException(e);
    } finally {
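For reference, a centroid file in the layout read above (one VectorWritable key per center with NullWritable values) could be produced roughly like this. It is a sketch under two assumptions: that the classic SequenceFile.createWriter(fs, conf, path, keyClass, valueClass) factory is used, and that VectorWritable can be constructed directly from a DoubleVector (its getVector() accessor above suggests it wraps one).

    // hypothetical writer for the centroid file format read above
    SequenceFile.Writer writer = SequenceFile.createWriter(fs,
        peer.getConfiguration(), centroids, VectorWritable.class,
        NullWritable.class);
    try {
      for (DoubleVector center : centers) {
        writer.append(new VectorWritable(center), NullWritable.get()); // assumed constructor
      }
    } finally {
      writer.close();
    }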

    DoubleVector[] msgCenters = new DoubleVector[centers.length];
    int[] incrementSum = new int[centers.length];
    CenterMessage msg;
    // sum the incoming partial centers per center index
    while ((msg = peer.getCurrentMessage()) != null) {
      DoubleVector oldCenter = msgCenters[msg.getCenterIndex()];
      DoubleVector newCenter = msg.getData();
      incrementSum[msg.getCenterIndex()] += msg.getIncrementCounter();
      if (oldCenter == null) {
        msgCenters[msg.getCenterIndex()] = newCenter;
      } else {
        msgCenters[msg.getCenterIndex()] = oldCenter.add(newCenter);
      }
    }
    // divide each summed center by the number of vectors that contributed to it
    for (int i = 0; i < msgCenters.length; i++) {
      // but only if we actually received an update for center i
      if (msgCenters[i] != null) {
        msgCenters[i] = msgCenters[i].divide(incrementSum[i]);
      }
    }
    // finally check for convergence by the absolute difference
    long convergedCounter = 0L;
    for (int i = 0; i < msgCenters.length; i++) {
      final DoubleVector oldCenter = centers[i];
      if (msgCenters[i] != null) {
        double calculateError = oldCenter.subtract(msgCenters[i]).abs().sum();
        if (calculateError > 0.0d) {
          centers[i] = msgCenters[i];
          convergedCounter++;
        }
      }
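Despite its name, convergedCounter is incremented whenever a center has actually moved (calculateError > 0), so it effectively counts the centers that have not yet converged in this iteration; the excerpt ends before the counter is used, but the surrounding job would presumably keep iterating until the count drops to zero. As a small worked example of the averaging step, if two messages for center 0 carry partial sums adding up to {2, 4} with an incrementSum of 2, the new center becomes {1, 2}.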
