Package org.apache.hama.commons.math

Examples of org.apache.hama.commons.math.DenseDoubleVector
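The excerpts below show how org.apache.hama.commons.math.DenseDoubleVector is used in practice. As a minimal, self-contained sketch of the class itself (using only the constructors and accessors that appear in the excerpts), a vector can be built from an existing double[] or allocated with a fixed dimension and filled element by element:

    import org.apache.hama.commons.math.DenseDoubleVector;
    import org.apache.hama.commons.math.DoubleVector;

    // build directly from an existing array
    DoubleVector fromArray = new DenseDoubleVector(new double[] { 1.0, 2.0, 3.0 });

    // or allocate a fixed dimension and fill it element by element
    DenseDoubleVector filled = new DenseDoubleVector(fromArray.getDimension());
    for (int i = 0; i < fromArray.getDimension(); i++) {
      filled.set(i, fromArray.get(i));
    }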


      // write each matrix row as a DenseDoubleVector keyed by its row index,
      // using either PipesVectorWritable or plain VectorWritable as the value type
      if (usePipesVectorWritable) {
        writer = new SequenceFile.Writer(fs, conf, path, IntWritable.class,
            PipesVectorWritable.class);

        for (int i = 0; i < matrix.length; i++) {
          DenseDoubleVector rowVector = new DenseDoubleVector(matrix[i]);
          writer.append(new IntWritable(i), new PipesVectorWritable(rowVector));
          LOG.debug("IntWritable: " + i + " PipesVectorWritable: "
              + rowVector.toString());
        }

      } else {
        writer = new SequenceFile.Writer(fs, conf, path, IntWritable.class,
            VectorWritable.class);

        for (int i = 0; i < matrix.length; i++) {
          DenseDoubleVector rowVector = new DenseDoubleVector(matrix[i]);
          writer.append(new IntWritable(i), new VectorWritable(rowVector));
          LOG.debug("IntWritable: " + i + " VectorWritable: "
              + rowVector.toString());
        }
      }

    } catch (IOException e) {
      e.printStackTrace();
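The excerpt above writes each row of a double[][] matrix as a DenseDoubleVector keyed by its row index. Reading the rows back follows the usual SequenceFile pattern; a minimal sketch for the plain VectorWritable case, reusing the fs, conf, path and LOG from the excerpt and assuming VectorWritable exposes the wrapped vector via getVector():

    SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, conf);
    try {
      IntWritable key = new IntWritable();
      VectorWritable value = new VectorWritable();
      while (reader.next(key, value)) {
        // each value holds the DenseDoubleVector written for that row index
        DoubleVector row = value.getVector();
        LOG.debug("row " + key.get() + ": " + row.toString());
      }
    } finally {
      reader.close();
    }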


        this.layerSizeList.get(0) - 1));
    // transform the features to another space
    DoubleVector transformedInstance = this.featureTransformer
        .transform(instance);
    // add bias feature
    DoubleVector instanceWithBias = new DenseDoubleVector(
        transformedInstance.getDimension() + 1);
    instanceWithBias.set(0, 0.99999); // set bias to be a little bit less than
                                      // 1.0
    for (int i = 1; i < instanceWithBias.getDimension(); ++i) {
      instanceWithBias.set(i, transformedInstance.get(i - 1));
    }

    List<DoubleVector> outputCache = getOutputInternal(instanceWithBias);
    // return the output of the last layer
    DoubleVector result = outputCache.get(outputCache.size() - 1);

    // forward step: weight the previous layer's output and apply the squashing function
    DoubleVector vec = weightMatrix.multiplyVectorUnsafe(intermediateOutput);
    vec = vec.applyToElements(this.squashingFunctionList.get(fromLayer));

    // add bias
    DoubleVector vecWithBias = new DenseDoubleVector(vec.getDimension() + 1);
    vecWithBias.set(0, 1);
    for (int i = 0; i < vec.getDimension(); ++i) {
      vecWithBias.set(i + 1, vec.get(i));
    }
    return vecWithBias;
  }
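The prepend-a-bias-entry pattern above (allocate a DenseDoubleVector one element larger, set index 0 to the bias value, copy the original entries shifted up by one) recurs in several of these excerpts. As a standalone sketch of just that pattern, with a helper name of our own rather than anything in the Hama API:

    // copy `v` into a vector that is one element longer, with the bias at index 0
    static DoubleVector prependBias(DoubleVector v, double bias) {
      DoubleVector withBias = new DenseDoubleVector(v.getDimension() + 1);
      withBias.set(0, bias);
      for (int i = 0; i < v.getDimension(); ++i) {
        withBias.set(i + 1, v.get(i));
      }
      return withBias;
    }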

              .format(
                  "The dimension of training instance is %d, but requires %d.",
                  trainingInstance.getDimension(), inputDimension
                      + outputDimension));

      inputInstance = new DenseDoubleVector(this.layerSizeList.get(0));
      inputInstance.set(0, 1); // add bias
      // get the features from the transformed vector
      for (int i = 0; i < inputDimension; ++i) {
        inputInstance.set(i + 1, transformedVector.get(i));
      }
      // get the labels from the original training instance
      labels = trainingInstance.sliceUnsafe(inputInstance.getDimension() - 1,
          trainingInstance.getDimension() - 1);
    } else if (this.learningStyle == LearningStyle.UNSUPERVISED) {
      // labels are identical to input features
      outputDimension = inputDimension;
      // validate training instance
      Preconditions.checkArgument(inputDimension == trainingInstance
          .getDimension(), String.format(
          "The dimension of training instance is %d, but requires %d.",
          trainingInstance.getDimension(), inputDimension));

      inputInstance = new DenseDoubleVector(this.layerSizeList.get(0));
      inputInstance.set(0, 1); // add bias
      // get the features from the transformed vector
      for (int i = 0; i < inputDimension; ++i) {
        inputInstance.set(i + 1, transformedVector.get(i));
      }

        .size()];
    for (int m = 0; m < weightUpdateMatrices.length; ++m) {
      weightUpdateMatrices[m] = new DenseDoubleMatrix(this.weightMatrixList
          .get(m).getRowCount(), this.weightMatrixList.get(m).getColumnCount());
    }
    DoubleVector deltaVec = new DenseDoubleVector(
        this.layerSizeList.get(this.layerSizeList.size() - 1));

    DoubleFunction squashingFunction = this.squashingFunctionList
        .get(this.squashingFunctionList.size() - 1);

    DoubleMatrix lastWeightMatrix = this.weightMatrixList
        .get(this.weightMatrixList.size() - 1);
    for (int i = 0; i < deltaVec.getDimension(); ++i) {
      double costFuncDerivative = this.costFunction.applyDerivative(
          labels.get(i), output.get(i + 1));
      // add regularization
      costFuncDerivative += this.regularizationWeight
          * lastWeightMatrix.getRowVector(i).sum();
      deltaVec.set(i, costFuncDerivative);
      deltaVec.set(
          i,
          deltaVec.get(i)
              * squashingFunction.applyDerivative(output.get(i + 1)));
    }

    // start from previous layer of output layer
    for (int layer = this.layerSizeList.size() - 2; layer >= 0; --layer) {
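The loop above fills the output-layer error term: for each output unit, the derivative of the cost function, plus a regularization contribution summed over that unit's incoming weights, is scaled by the derivative of the squashing function. Restated as a standalone helper (a sketch only; the helper name is ours, and it relies on the same applyDerivative methods the excerpt calls):

    // `output` is assumed to carry a bias entry at index 0, so unit i is read
    // at output.get(i + 1), exactly as in the excerpt above
    static DoubleVector outputLayerDelta(DoubleVector labels, DoubleVector output,
        DoubleMatrix lastWeightMatrix, DoubleDoubleFunction costFunction,
        DoubleFunction squashingFunction, double regularizationWeight) {
      DoubleVector delta = new DenseDoubleVector(labels.getDimension());
      for (int i = 0; i < delta.getDimension(); ++i) {
        // derivative of the cost with respect to this output unit ...
        double d = costFunction.applyDerivative(labels.get(i), output.get(i + 1));
        // ... plus the regularization term over its incoming weights ...
        d += regularizationWeight * lastWeightMatrix.getRowVector(i).sum();
        // ... scaled by the derivative of the squashing function
        delta.set(i, d * squashingFunction.applyDerivative(output.get(i + 1)));
      }
      return delta;
    }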

   * The model meta-data is stored in memory.
   */
  public DoubleVector outputWrapper(DoubleVector featureVector) {
    List<double[]> outputCache = this.outputInternal(featureVector);
    // the output of the last layer is the output of the MLP
    return new DenseDoubleVector(outputCache.get(outputCache.size() - 1));
  }
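Because outputWrapper is a public entry point, a caller only needs to hand it a DenseDoubleVector; a short usage sketch (the mlp instance and the feature values are assumptions, not part of the excerpt):

    DoubleVector feature = new DenseDoubleVector(new double[] { 0.2, 0.5, 0.8 });
    DoubleVector prediction = mlp.outputWrapper(feature);
    double firstOutput = prediction.get(0);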

    double[] trainingFeature = this.featureTransformer.transform(
        trainingInstance.sliceUnsafe(0, this.layerSizeArray[0] - 1)).toArray();
    double[] trainingLabels = Arrays.copyOfRange(trainingVec,
        this.layerSizeArray[0], trainingVec.length);

    DoubleVector trainingFeatureVec = new DenseDoubleVector(trainingFeature);
    List<double[]> outputCache = this.outputInternal(trainingFeatureVec);

    // calculate the delta of output layer
    double[] delta = new double[this.layerSizeArray[this.layerSizeArray.length - 1]];
    double[] outputLayerOutput = outputCache.get(outputCache.size() - 1);
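The excerpt above treats a training vector as features followed by labels: the first layerSizeArray[0] entries are sliced off (and run through the feature transformer) as features, and the remainder is copied out as labels. Stripped of the transformer, the layout looks like this (dimensions are illustrative):

    int featureDimension = 3;                       // layerSizeArray[0] in the excerpt
    double[] trainingVec = { 0.1, 0.2, 0.3, 1.0 };  // three features, one label

    double[] features = Arrays.copyOfRange(trainingVec, 0, featureDimension);
    double[] labels = Arrays.copyOfRange(trainingVec, featureDimension, trainingVec.length);

    DoubleVector featureVec = new DenseDoubleVector(features);
    DoubleVector labelVec = new DenseDoubleVector(labels);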

      // a leading key token is not part of the vector values
      if (hasKey) {
        columnLength = columnLength - 1;
        indexPos++;
      }

      DenseDoubleVector vec = new DenseDoubleVector(columnLength);
      for (int j = 0; j < columnLength; j++) {
        vec.set(j, Double.parseDouble(split[j + indexPos]));
      }

      VectorWritable vector;
      if (hasKey) {
        NamedDoubleVector named = new NamedDoubleVector(split[0], vec);
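Pulled together, the parsing path above is: split the line, skip the leading key token if there is one, fill a DenseDoubleVector, and keep the key by wrapping the result in a NamedDoubleVector. A sketch assembled from the excerpt (the line variable, the whitespace delimiter and hasKey are assumptions; the surrounding read loop is omitted):

    String[] split = line.split("\\s+");
    int indexPos = 0;
    int columnLength = split.length;
    if (hasKey) {
      columnLength = columnLength - 1;
      indexPos++;
    }

    DenseDoubleVector vec = new DenseDoubleVector(columnLength);
    for (int j = 0; j < columnLength; j++) {
      vec.set(j, Double.parseDouble(split[j + indexPos]));
    }

    VectorWritable vector;
    if (hasKey) {
      // preserve the key alongside the parsed values
      vector = new VectorWritable(new NamedDoubleVector(split[0], vec));
    } else {
      vector = new VectorWritable(vec);
    }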

      double[] arr = new double[dimension];
      for (int d = 0; d < dimension; d++) {
        arr[d] = r.nextInt(count);
      }
      VectorWritable vector = new VectorWritable(new DenseDoubleVector(arr));
      dataWriter.append(vector, value);
      // the first k generated vectors double as the initial cluster centers
      if (k > i) {
        centerWriter.append(vector, value);
      }
    }

   *
   * @param inputInstance the vector to transform
   * @param inputLayer the layer at which the instance is fed into the network
   * @return The compressed information.
   */
  private DoubleVector transform(DoubleVector inputInstance, int inputLayer) {
    DoubleVector internalInstance = new DenseDoubleVector(inputInstance.getDimension() + 1);
    internalInstance.set(0, 1);
    for (int i = 0; i < inputInstance.getDimension(); ++i) {
      internalInstance.set(i + 1, inputInstance.get(i));
    }
    DoubleFunction squashingFunction = model
        .getSquashingFunction(inputLayer);
    DoubleMatrix weightMatrix = null;
    if (inputLayer == 0) {
