Package edu.ucla.sspace.matrix

Examples of edu.ucla.sspace.matrix.ArrayMatrix$RowVector


        // Transform the vector according to this instance's transform's state,
        // which should normalize the vector as the original vectors were.
        DoubleVector transformed = transform.transform(docVec);

        // Represent the document as a 1-column matrix       
        Matrix queryAsMatrix = new ArrayMatrix(1, numDims);
        for (int nz : docVec.getNonZeroIndices())
            queryAsMatrix.set(0, nz, transformed.get(nz));
       
        // Project the new document vector, d, by using
        //
        //   d * U_k * Sigma_k^-1
        //
View Full Code Here


            {7, 8, 5, 0},
            {3, 8, 0, 0},
            {7, 2, 9, 2},
        };

        Matrix matrix = new ArrayMatrix(values);
        Assignment[] as = new Assignment[] {
            new HardAssignment(0),
            new HardAssignment(0),
            new HardAssignment(1),
            new HardAssignment(1),
View Full Code Here

            {7, 8, 5, 0},
            {3, 8, 0, 0},
            {7, 2, 9, 2},
        };

        Matrix matrix = new ArrayMatrix(values);
        Assignment[] as = new Assignment[] {
            new HardAssignment(0),
            new HardAssignment(0),
            new HardAssignment(1),
            new HardAssignment(1),
View Full Code Here

            {7, 8, 5, 0},
            {3, 8, 0, 0},
            {7, 2, 9, 2},
        };

        Matrix matrix = new ArrayMatrix(values);
        Assignment[] as = new Assignment[] {
            new HardAssignment(0),
            new HardAssignment(0),
            new HardAssignment(1),
            new HardAssignment(1),
View Full Code Here

            numDimensions >= m.rows())
            throw new IllegalArgumentException(
                    "Cannot factorize with more dimensions than there are " +
                    "rows or columns");
        this.numDimensions = numDimensions;
        A = new ArrayMatrix(m.rows(), numDimensions);
        initialize(A);
        X = new ArrayMatrix(numDimensions, m.columns());
        initialize(X);

        for (int i = 0; i < numIterations; ++i) {
            updateX(computeGofX(m), computeLearningRateX());
            updateA(computeGofA(m), computeLearningRateA());
View Full Code Here

     * Without ever completely computing (AX-Y).  This is done by computing
     * (AX-Y) on a row by row basis and then multiplying the row of (AX-Y) by
     * the relevant parts of A' to update the final matrix.
     */
    private Matrix computeGofX(SparseMatrix Y) {
        Matrix G = new ArrayMatrix(numDimensions, X.columns());
        // Iterate through each row of Y, and thux AX-Y.  For each row, compute
        // the difference between the two matrics.  Then multiply the values in
        // the correspond row of A against this difference matrix.  Instead of
        // computing the inner product of A' and (AX-Y), we update each k,m
        // value in G by iterating through the columns of A.
        for (int r = 0; r < Y.rows(); ++r) {
            // Get the row vector of Y.
            SparseDoubleVector v = Y.getRowVector(r);
            double[] vDiff = new double[v.length()];
            // Compute the difference between the row vector of AX and the row
            // vector of Y.  This is the straightforward dot product between A_i
            // and X'_i.
            for (int c = 0; c < X.columns(); c++) {
                double sum = 0;
                for (int k = 0; k < A.columns(); ++k)
                    sum += A.get(r, k) * X.get(k, c);
                vDiff[c] = sum - v.get(c);
            }

            // Now get the row vector A_i and for each column k, multiply it
            // against each column c in vDiff to get the difference in the
            // gradient G_{k, c}.
            for (int k = 0; k < A.columns(); ++k)
                for (int c = 0; c < X.columns(); ++c)
                    G.set(k, c, G.get(k, c) + A.get(r, k) * vDiff[c]);
        }
        return G;
    }
View Full Code Here

     * Without ever completely computing (AX-Y).  This computation is
     * straightforward as the rows of (AX-Y) can be fully utilized in the naive
     * matrix multiplication against X' without any inverted traversals.
     */
    private Matrix computeGofA(SparseMatrix Y) {
        Matrix G = new ArrayMatrix(A.rows(), numDimensions);
        // Iterate through each row of Y, and thux AX-Y.  For each row, compute
        // the difference between the two matrics.  Then multiply the values in
        // the correspond column of X' against this difference matrix. 
        for (int r = 0; r < Y.rows(); ++r) {
            // Get the row vector of Y.
            SparseDoubleVector v = Y.getRowVector(r);
            double[] vDiff = new double[v.length()];
            // Compute the difference between the row vector of AX and the row
            // vector of Y.  This is the straightforward dot product between A_i
            // and X'_i.
            for (int c = 0; c < X.columns(); c++) {
                double sum = 0;
                for (int k = 0; k < A.columns(); ++k)
                    sum += A.get(r, k) * X.get(k, c);
                vDiff[c] = sum - v.get(c);
            }

            for (int k = 0; k < X.rows(); ++k) {
                double sum = 0;
                for (int c = 0; c < X.columns(); ++c)
                    sum += vDiff[c] * X.get(k, c);
                G.set(r, k, sum);
            }
        }
        return G;
    }
View Full Code Here

     * {@inheritDoc}
     */
    public Matrix dataClasses() {
        if (!scaledDataClasses) {
            scaledDataClasses = true;
            dataClasses = new ArrayMatrix(U.rows(), U.columns());
            // Weight the values in the data point space by the singular
            // values.
            //
            // REMINDER: when the RowScaledMatrix class is merged in with
            // the trunk, this code should be replaced.
View Full Code Here

     * {@inheritDoc}
     */
    public Matrix classFeatures() {
        if (!scaledClassFeatures) {
            scaledClassFeatures = true;
            classFeatures = new ArrayMatrix(V.rows(), V.columns());
            // Weight the values in the document space by the singular
            // values.
            //
            // REMINDER: when the RowScaledMatrix class is merged in with
            // the trunk, this code should be replaced.
View Full Code Here

        int index = 0;
       
        // reusable array for writing rows into the matrix
        double[] row = new double[columns];
       
        matrix = new ArrayMatrix(rows, columns);

        while ((line = br.readLine()) != null) {
            if (index >= rows)
                throw new IOException("More rows than specified");
            String[] termVectorPair = line.split("\\|");
View Full Code Here

TOP

Related Classes of edu.ucla.sspace.matrix.ArrayMatrix$RowVector

Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.