Package org.apache.mahout.utils.vectors

Examples of org.apache.mahout.utils.vectors.RandomVectorIterable$VectIterator
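RandomVectorIterable is a small test helper that produces a fixed number of randomly generated Vectors; its inner VectIterator class hands out one vector per call to next(). The snippets below, apparently drawn from Mahout's vector writer tests, feed it into a SequenceFileVectorWriter or a CSV-emitting writer. A minimal sketch of the iteration itself, assuming only the constructor already used in the snippets:

    // Minimal sketch (behaviour inferred from the snippets below, not from the class source):
    // RandomVectorIterable(50) yields 50 randomly generated Vectors via its VectIterator.
    Iterable<Vector> vectors = new RandomVectorIterable(50);
    int count = 0;
    for (Vector v : vectors) {
      count++;                     // each iteration sees a freshly generated Vector
    }
    // count is now 50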


    // Write 50 random vectors to a SequenceFile via SequenceFileVectorWriter,
    // then reopen the same file with a SequenceFile.Reader.
    Path path = new Path(tmpFile.getAbsolutePath());
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    SequenceFile.Writer seqWriter = new SequenceFile.Writer(fs, conf, path, LongWritable.class, VectorWritable.class);
    SequenceFileVectorWriter writer = new SequenceFileVectorWriter(seqWriter);
    RandomVectorIterable iter = new RandomVectorIterable(50);
    writer.write(iter);
    writer.close();
   
    SequenceFile.Reader seqReader = new SequenceFile.Reader(fs, path, conf);
    LongWritable key = new LongWritable();
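The snippet is truncated at this point; a sketch of the read-back loop that typically follows, assuming the value class is VectorWritable as declared by the writer above:

    // Sketch of reading the vectors back (not the original continuation of the test).
    VectorWritable value = new VectorWritable();
    int numRead = 0;
    while (seqReader.next(key, value)) {
      Vector v = value.get();      // the deserialized random vector
      numRead++;
    }
    seqReader.close();
    // numRead should equal the 50 vectors written above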


    // Same write/read round trip, this time against a test temp path.
    Path path = getTestTempFilePath("sfvw");
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    SequenceFile.Writer seqWriter = new SequenceFile.Writer(fs, conf, path, LongWritable.class, VectorWritable.class);
    SequenceFileVectorWriter writer = new SequenceFileVectorWriter(seqWriter);
    Iterable<Vector> iter = new RandomVectorIterable(50);
    writer.write(iter);
    writer.close();
   
    SequenceFile.Reader seqReader = new SequenceFile.Reader(fs, path, conf);
    Writable key = new LongWritable();

    // Write the random vectors, closing the writer in a finally block,
    // then count the records that landed in the SequenceFile.
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    SequenceFile.Writer seqWriter = new SequenceFile.Writer(fs, conf, path, LongWritable.class, VectorWritable.class);
    SequenceFileVectorWriter writer = new SequenceFileVectorWriter(seqWriter);
    try {
      writer.write(new RandomVectorIterable(50));
    } finally {
      Closeables.close(writer, false);
    }

    long count = HadoopUtil.countRecords(path, conf);

        // Emit each Vector as a single CSV line
        String vecStr = VectorHelper.vectorToCSVString(vector, false);
        getWriter().write(vecStr);
      }
    };
    try {
      Iterable<Vector> iter = new RandomVectorIterable(50);
      writer.write(iter);
    } finally {
      Closeables.close(writer, false);
    }
    Iterator<Vector> csvIter = new CSVVectorIterator(new StringReader(sWriter.getBuffer().toString()));

    // Same round trip, but with SparseVector as the SequenceFile value class
    // instead of VectorWritable.
    Path path = new Path(tmpFile.getAbsolutePath());
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    SequenceFile.Writer seqWriter = new SequenceFile.Writer(fs, conf, path, LongWritable.class, SparseVector.class);
    SequenceFileVectorWriter writer = new SequenceFileVectorWriter(seqWriter);
    RandomVectorIterable iter = new RandomVectorIterable(50);
    writer.write(iter);
    writer.close();

    SequenceFile.Reader seqReader = new SequenceFile.Reader(fs, path, conf);
    LongWritable key = new LongWritable();

    // Write 50 random vectors, then count the records and assert the expected total.
    Path path = getTestTempFilePath("sfvw");
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    SequenceFile.Writer seqWriter = new SequenceFile.Writer(fs, conf, path, LongWritable.class, VectorWritable.class);
    SequenceFileVectorWriter writer = new SequenceFileVectorWriter(seqWriter);
    Iterable<Vector> iter = new RandomVectorIterable(50);
    writer.write(iter);
    writer.close();

    long count = HadoopUtil.countRecords(path, conf);
    assertEquals(50, count);

      // write(Vector) override: render each Vector as a CSV line via VectorHelper
      public void write(Vector vector) throws IOException {
        String vecStr = VectorHelper.vectorToCSVString(vector, false);
        getWriter().write(vecStr);
      }
    };
    Iterable<Vector> iter = new RandomVectorIterable(50);
    jwvw.write(iter);
    jwvw.close();
    Iterator<Vector> csvIter = new CSVVectorIterator(new StringReader(sWriter.getBuffer().toString()));
    int count = 0;
    while (csvIter.hasNext()) {
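The CSV-writing snippets above are truncated; a self-contained sketch of the same round trip through VectorHelper.vectorToCSVString and CSVVectorIterator (the DenseVector construction here is illustrative, not taken from the original test):

    // Sketch: convert one vector to a CSV line and parse it back.
    // DenseVector and the literal values are assumptions for illustration only.
    Vector original = new DenseVector(new double[] {1.0, 2.0, 3.0});
    String csv = VectorHelper.vectorToCSVString(original, false);   // same flag as in the snippets above
    Iterator<Vector> parsed = new CSVVectorIterator(new StringReader(csv));
    Vector roundTripped = parsed.next();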

    // Same pattern using the test's getConfiguration(); close via Closeables.close
    // and count the records written.
    Configuration conf = getConfiguration();
    FileSystem fs = FileSystem.get(conf);
    SequenceFile.Writer seqWriter = new SequenceFile.Writer(fs, conf, path, LongWritable.class, VectorWritable.class);
    SequenceFileVectorWriter writer = new SequenceFileVectorWriter(seqWriter);
    try {
      writer.write(new RandomVectorIterable(50));
    } finally {
      Closeables.close(writer, false);
    }

    long count = HadoopUtil.countRecords(path, conf);

    // Variant that closes the writer with Closeables.closeQuietly (newer Guava
    // releases only keep closeQuietly overloads for InputStream and Reader).
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    SequenceFile.Writer seqWriter = new SequenceFile.Writer(fs, conf, path, LongWritable.class, VectorWritable.class);
    SequenceFileVectorWriter writer = new SequenceFileVectorWriter(seqWriter);
    try {
      writer.write(new RandomVectorIterable(50));
    } finally {
      Closeables.closeQuietly(writer);
    }

    long count = HadoopUtil.countRecords(path, conf);
