Class org.apache.hadoop.hbase.io.hfile.HFile

Examples of org.apache.hadoop.hbase.io.hfile.HFile.Writer
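
The snippets below appear to be excerpted from the HBase test suite and show how HFile.Writer instances are obtained from HFile.getWriterFactory(conf, cacheConf) and configured through the builder methods withPath, withOutputStream, withBlockSize, withCompression, and withComparator.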


  /**
   * Write an HFile with no entries, then verify the reader loads
   * file info and reports null first and last keys.
   * @throws IOException
   */
  public void testEmptyHFile() throws IOException {
    if (cacheConf == null) cacheConf = new CacheConfig(conf);
    Path f = new Path(ROOT_DIR, getName());
    Writer w =
        HFile.getWriterFactory(conf, cacheConf).withPath(fs, f).create();
    w.close();
    Reader r = HFile.createReader(fs, f, cacheConf);
    r.loadFileInfo();
    assertNull(r.getFirstKey());
    assertNull(r.getLastKey());
  }
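For contrast with the empty-file case, a minimal non-empty round trip might look like the following sketch. It assumes the same test fixtures as above (conf, cacheConf, fs, ROOT_DIR) plus org.apache.hadoop.hbase.util.Bytes, java.nio.ByteBuffer, and the HFileScanner API of the same HFile vintage; exact scanner signatures vary across HBase versions.

    Path f = new Path(ROOT_DIR, "roundtrip.hfile");
    Writer w = HFile.getWriterFactory(conf, cacheConf).withPath(fs, f).create();
    w.append(Bytes.toBytes("row1"), Bytes.toBytes("value1"));
    w.close();

    Reader r = HFile.createReader(fs, f, cacheConf);
    r.loadFileInfo();
    HFileScanner scanner = r.getScanner(false, false); // cacheBlocks=false, pread=false
    scanner.seekTo();                    // positions at the first key; true if non-empty
    ByteBuffer key = scanner.getKey();   // ByteBuffer views over the current cell
    ByteBuffer value = scanner.getValue();
    r.close();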


  /**
   * Write and read back an HFile using the given compression codec.
   */
  void basicWithSomeCodec(String codec) throws IOException {
    if (cacheConf == null) cacheConf = new CacheConfig(conf);
    Path ncTFile = new Path(ROOT_DIR, "basic.hfile." + codec.toString());
    FSDataOutputStream fout = createFSOutput(ncTFile);
    Writer writer = HFile.getWriterFactory(conf, cacheConf)
        .withOutputStream(fout)
        .withBlockSize(minBlockSize)
        .withCompression(codec)
        .create();
    LOG.info(writer);
    // ... (rest of method elided)
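The codec parameter is a compression name understood by Compression.Algorithm, typically "none", "gz", or "lzo" in this vintage of HBase.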

  private void metablocks(final String compress) throws Exception {
    if (cacheConf == null) cacheConf = new CacheConfig(conf);
    Path mFile = new Path(ROOT_DIR, "meta.hfile");
    FSDataOutputStream fout = createFSOutput(mFile);
    Writer writer = HFile.getWriterFactory(conf, cacheConf)
        .withOutputStream(fout)
        .withBlockSize(minBlockSize)
        .withCompression(compress)
        .create();
    someTestingWithMetaBlock(writer);
    writer.close();
    fout.close();
    FSDataInputStream fin = fs.open(mFile);
    Reader reader = HFile.createReaderFromStream(mFile, fin,
        this.fs.getFileStatus(mFile).getLen(), cacheConf);
    reader.loadFileInfo();
    // ... (rest of method elided)
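someTestingWithMetaBlock is a test helper not shown on this page. A hypothetical stand-in of the same shape, assuming the appendMetaBlock(String, Writable) signature of this HFile vintage and made-up block names and payloads:

    writer.appendMetaBlock("HFileMeta1", new Text("meta value 1")); // hypothetical name/payload
    writer.appendMetaBlock("HFileMeta2", new Text("meta value 2"));
    // ... after closing the writer and reopening a Reader as above:
    ByteBuffer buf = reader.getMetaBlock("HFileMeta1", false);      // cacheBlock=false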

    if (cacheConf == null) cacheConf = new CacheConfig(conf);
    for (Compression.Algorithm compressAlgo :
        HBaseTestingUtility.COMPRESSION_ALGORITHMS) {
      Path mFile = new Path(ROOT_DIR, "nometa_" + compressAlgo + ".hfile");
      FSDataOutputStream fout = createFSOutput(mFile);
      Writer writer = HFile.getWriterFactory(conf, cacheConf)
          .withOutputStream(fout)
          .withBlockSize(minBlockSize)
          .withCompression(compressAlgo)
          .create();
      writer.append("foo".getBytes(), "value".getBytes());
      writer.close();
      fout.close();
      Reader reader = HFile.createReader(fs, mFile, cacheConf);
      reader.loadFileInfo();
      assertNull(reader.getMetaBlock("non-existent", false));
    }
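Note that getMetaBlock returns null for an absent meta block rather than throwing; the boolean argument controls whether the block is added to the block cache.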

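(The next snippet opens partway through an anonymous comparator. The elided overload evidently inverts the natural byte ordering: HFile.Writer requires keys in comparator order, so appending "3", "2", "1" is the valid ascending sequence under that comparator.)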
      @Override
      public int compare(byte[] o1, byte[] o2) {
        return compare(o1, 0, o1.length, o2, 0, o2.length);
      }
    };
    Writer writer = HFile.getWriterFactory(conf, cacheConf)
        .withOutputStream(fout)
        .withBlockSize(minBlockSize)
        .withComparator(comparator)
        .create();
    writer.append("3".getBytes(), "0".getBytes());
    writer.append("2".getBytes(), "0".getBytes());
    writer.append("1".getBytes(), "0".getBytes());
    writer.close();
  }

  /**
   * Create a truncated HFile and verify that an exception is thrown
   * when it is opened.
   */
  public void testCorruptTruncatedHFile() throws IOException {
    if (cacheConf == null) cacheConf = new CacheConfig(conf);
    Path f = new Path(ROOT_DIR, getName());
    Writer w = HFile.getWriterFactory(conf, cacheConf).withPath(this.fs, f).create();
    writeSomeRecords(w, 0, 100);
    w.close();

    Path trunc = new Path(f.getParent(), "truncated");
    truncateFile(fs, w.getPath(), trunc);

    try {
      Reader r = HFile.createReader(fs, trunc, cacheConf);
    } catch (CorruptHFileException che) {
      // Expected failure
      return;
    }
    fail("Should have thrown CorruptHFileException");
  }
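
truncateFile is another helper not shown here. A hypothetical sketch of what it might do, copying only the first half of the source file so the HFile trailer is lost:

  static void truncateFile(FileSystem fs, Path src, Path dst) throws IOException {
    long newLen = fs.getFileStatus(src).getLen() / 2; // drop the second half, trailer included
    byte[] buf = new byte[(int) newLen];
    FSDataInputStream in = fs.open(src);
    try {
      in.readFully(buf);
    } finally {
      in.close();
    }
    FSDataOutputStream out = fs.create(dst, true);    // overwrite if present
    try {
      out.write(buf);
    } finally {
      out.close();
    }
  }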
