Examples of HalfStoreFileReader
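The snippets below are collected from several Apache HBase releases, so the HalfStoreFileReader constructors and the StoreFile.Writer builder API differ from one example to the next; each snippet is shown as it appears in its source tree.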


Examples of org.apache.hadoop.hbase.io.HalfStoreFileReader
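This copyHFileHalf() helper from HBase's bulk-load tooling opens one half of a split HFile through a HalfStoreFileReader and copies every cell into a new store file, carrying the family's compression, block encoding, bloom filter, and checksum settings over to the writer (0.94-era API, with HStore supplying the checksum defaults).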

  private static void copyHFileHalf(
      Configuration conf, Path inFile, Path outFile, Reference reference,
      HColumnDescriptor familyDescriptor)
  throws IOException {
    FileSystem fs = inFile.getFileSystem(conf);
    CacheConfig cacheConf = new CacheConfig(conf);
    HalfStoreFileReader halfReader = null;
    StoreFile.Writer halfWriter = null;
    HFileDataBlockEncoder dataBlockEncoder = new HFileDataBlockEncoderImpl(
        familyDescriptor.getDataBlockEncodingOnDisk(),
        familyDescriptor.getDataBlockEncoding());
    try {
      halfReader = new HalfStoreFileReader(fs, inFile, cacheConf,
          reference, DataBlockEncoding.NONE);
      Map<byte[], byte[]> fileInfo = halfReader.loadFileInfo();

      int blocksize = familyDescriptor.getBlocksize();
      Algorithm compression = familyDescriptor.getCompression();
      BloomType bloomFilterType = familyDescriptor.getBloomFilterType();

      halfWriter = new StoreFile.WriterBuilder(conf, cacheConf,
          fs, blocksize)
              .withFilePath(outFile)
              .withCompression(compression)
              .withDataBlockEncoder(dataBlockEncoder)
              .withBloomType(bloomFilterType)
              .withChecksumType(HStore.getChecksumType(conf))
              .withBytesPerChecksum(HStore.getBytesPerChecksum(conf))
              .build();
      HFileScanner scanner = halfReader.getScanner(false, false, false);
      scanner.seekTo();
      do {
        KeyValue kv = scanner.getKeyValue();
        halfWriter.append(kv);
      } while (scanner.next());

      for (Map.Entry<byte[],byte[]> entry : fileInfo.entrySet()) {
        if (shouldCopyHFileMetaKey(entry.getKey())) {
          halfWriter.appendFileInfo(entry.getKey(), entry.getValue());
        }
      }
    } finally {
      if (halfWriter != null) halfWriter.close();
      if (halfReader != null) halfReader.close(cacheConf.shouldEvictOnClose());
    }
  }

Examples of org.apache.hadoop.hbase.io.HalfStoreFileReader
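In this open path a coprocessor is first offered the chance to supply the reader via preStoreFileReaderOpen(); only if no coprocessor returns one does the code fall back to a HalfStoreFileReader for reference (half) files or a plain StoreFile.Reader otherwise.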

    if (this.coprocessorHost != null) {
      reader = this.coprocessorHost.preStoreFileReaderOpen(fs, this.getPath(), in, length,
        cacheConf, reference);
    }
    if (reader == null) {
      if (this.reference != null) {
        reader = new HalfStoreFileReader(fs, this.getPath(), in, length, cacheConf, reference,
          conf);
      } else {
        reader = new StoreFile.Reader(fs, this.getPath(), in, length, cacheConf, conf);
      }
    }

Examples of org.apache.hadoop.hbase.io.HalfStoreFileReader
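The same copy routine against the 0.96-era API: the writer's compression, checksum, block size, and encoding settings now travel in an HFileContext built by HFileContextBuilder, and the HalfStoreFileReader constructor takes the Configuration directly instead of a DataBlockEncoding.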

  private static void copyHFileHalf(
      Configuration conf, Path inFile, Path outFile, Reference reference,
      HColumnDescriptor familyDescriptor)
  throws IOException {
    FileSystem fs = inFile.getFileSystem(conf);
    CacheConfig cacheConf = new CacheConfig(conf);
    HalfStoreFileReader halfReader = null;
    StoreFile.Writer halfWriter = null;
    try {
      halfReader = new HalfStoreFileReader(fs, inFile, cacheConf, reference, conf);
      Map<byte[], byte[]> fileInfo = halfReader.loadFileInfo();

      int blocksize = familyDescriptor.getBlocksize();
      Algorithm compression = familyDescriptor.getCompression();
      BloomType bloomFilterType = familyDescriptor.getBloomFilterType();
      HFileContext hFileContext = new HFileContextBuilder()
                                  .withCompression(compression)
                                  .withChecksumType(HStore.getChecksumType(conf))
                                  .withBytesPerCheckSum(HStore.getBytesPerChecksum(conf))
                                  .withBlockSize(blocksize)
                                  .withDataBlockEncoding(familyDescriptor.getDataBlockEncoding())
                                  .build();
      halfWriter = new StoreFile.WriterBuilder(conf, cacheConf,
          fs)
              .withFilePath(outFile)
              .withBloomType(bloomFilterType)
              .withFileContext(hFileContext)
              .build();
      HFileScanner scanner = halfReader.getScanner(false, false, false);
      scanner.seekTo();
      do {
        KeyValue kv = KeyValueUtil.ensureKeyValue(scanner.getKeyValue());
        halfWriter.append(kv);
      } while (scanner.next());

      for (Map.Entry<byte[],byte[]> entry : fileInfo.entrySet()) {
        if (shouldCopyHFileMetaKey(entry.getKey())) {
          halfWriter.appendFileInfo(entry.getKey(), entry.getValue());
        }
      }
    } finally {
      if (halfWriter != null) halfWriter.close();
      if (halfReader != null) halfReader.close(cacheConf.shouldEvictOnClose());
    }
  }
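
As a usage illustration, here is a minimal sketch of opening the top half of an HFile directly with the 0.96-era constructor from the example above. The file path and split row are hypothetical placeholders, and building the Reference from KeyValue.createFirstOnRow() plus Reference.createTopReference() mirrors how a region split constructs one; real code would normally obtain the reader through StoreFile rather than by hand.

    // Minimal sketch, assuming the HalfStoreFileReader(fs, path, cacheConf,
    // reference, conf) constructor shown above; the path and split row are
    // hypothetical. Assumed to run inside a method that throws IOException.
    Configuration conf = HBaseConfiguration.create();
    Path inFile = new Path("/hbase/archive/example/cf/examplehfile"); // hypothetical
    FileSystem fs = inFile.getFileSystem(conf);
    CacheConfig cacheConf = new CacheConfig(conf);

    // A top reference selects the keys at or after the split point. The split
    // key is expressed as a full KeyValue key, as a region split would do.
    byte[] splitKey = KeyValue.createFirstOnRow(Bytes.toBytes("row-500")).getKey();
    Reference topHalf = Reference.createTopReference(splitKey);

    HalfStoreFileReader halfReader =
        new HalfStoreFileReader(fs, inFile, cacheConf, topHalf, conf);
    try {
      halfReader.loadFileInfo();
      HFileScanner scanner = halfReader.getScanner(false, false, false);
      if (scanner.seekTo()) {
        do {
          KeyValue kv = KeyValueUtil.ensureKeyValue(scanner.getKeyValue());
          // ... process kv ...
        } while (scanner.next());
      }
    } finally {
      halfReader.close(cacheConf.shouldEvictOnClose());
    }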

Examples of org.apache.hadoop.hbase.io.HalfStoreFileReader
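A near-identical copy of the first example; the only difference is that the checksum type and bytes-per-checksum defaults come from Store rather than HStore.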

  private static void copyHFileHalf(
      Configuration conf, Path inFile, Path outFile, Reference reference,
      HColumnDescriptor familyDescriptor)
  throws IOException {
    FileSystem fs = inFile.getFileSystem(conf);
    CacheConfig cacheConf = new CacheConfig(conf);
    HalfStoreFileReader halfReader = null;
    StoreFile.Writer halfWriter = null;
    HFileDataBlockEncoder dataBlockEncoder = new HFileDataBlockEncoderImpl(
        familyDescriptor.getDataBlockEncodingOnDisk(),
        familyDescriptor.getDataBlockEncoding());
    try {
      halfReader = new HalfStoreFileReader(fs, inFile, cacheConf,
          reference, DataBlockEncoding.NONE);
      Map<byte[], byte[]> fileInfo = halfReader.loadFileInfo();

      int blocksize = familyDescriptor.getBlocksize();
      Algorithm compression = familyDescriptor.getCompression();
      BloomType bloomFilterType = familyDescriptor.getBloomFilterType();

      halfWriter = new StoreFile.WriterBuilder(conf, cacheConf,
          fs, blocksize)
              .withFilePath(outFile)
              .withCompression(compression)
              .withDataBlockEncoder(dataBlockEncoder)
              .withBloomType(bloomFilterType)
              .withChecksumType(Store.getChecksumType(conf))
              .withBytesPerChecksum(Store.getBytesPerChecksum(conf))
              .build();
      HFileScanner scanner = halfReader.getScanner(false, false, false);
      scanner.seekTo();
      do {
        KeyValue kv = scanner.getKeyValue();
        halfWriter.append(kv);
      } while (scanner.next());

      for (Map.Entry<byte[],byte[]> entry : fileInfo.entrySet()) {
        if (shouldCopyHFileMetaKey(entry.getKey())) {
          halfWriter.appendFileInfo(entry.getKey(), entry.getValue());
        }
      }
    } finally {
      if (halfWriter != null) halfWriter.close();
      if (halfReader != null) halfReader.close(cacheConf.shouldEvictOnClose());
    }
  }

Examples of org.apache.hadoop.hbase.io.HalfStoreFileReader
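From a StoreFile open() method: a reference file gets a HalfStoreFileReader, routed through the HFileLink when the reference points at a linked file, while a plain link falls through to a regular Reader sized from the link's FileStatus.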

    if (this.reader != null) {
      throw new IllegalAccessError("Already open");
    }
    if (isReference()) {
      if (this.link != null) {
        this.reader = new HalfStoreFileReader(this.fs, this.referencePath, this.link,
          this.cacheConf, this.reference, dataBlockEncoder.getEncodingInCache());
      } else {
        this.reader = new HalfStoreFileReader(this.fs, this.referencePath,
          this.cacheConf, this.reference, dataBlockEncoder.getEncodingInCache());
      }
    } else if (isLink()) {
      long size = link.getFileStatus(fs).getLen();
      this.reader = new Reader(this.fs, this.path, link, size, this.cacheConf,
      // ... (remainder of the method elided)

Examples of org.apache.hadoop.hbase.io.HalfStoreFileReader
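Another variant of copyHFileHalf(), here building the block encoder from the family's single data-block-encoding setting and constructing the HalfStoreFileReader without an explicit encoding argument.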

  private static void copyHFileHalf(
      Configuration conf, Path inFile, Path outFile, Reference reference,
      HColumnDescriptor familyDescriptor)
  throws IOException {
    FileSystem fs = inFile.getFileSystem(conf);
    CacheConfig cacheConf = new CacheConfig(conf);
    HalfStoreFileReader halfReader = null;
    StoreFile.Writer halfWriter = null;
    HFileDataBlockEncoder dataBlockEncoder = new HFileDataBlockEncoderImpl(
      familyDescriptor.getDataBlockEncoding());
    try {
      halfReader = new HalfStoreFileReader(fs, inFile, cacheConf, reference);
      Map<byte[], byte[]> fileInfo = halfReader.loadFileInfo();

      int blocksize = familyDescriptor.getBlocksize();
      Algorithm compression = familyDescriptor.getCompression();
      BloomType bloomFilterType = familyDescriptor.getBloomFilterType();

      halfWriter = new StoreFile.WriterBuilder(conf, cacheConf,
          fs, blocksize)
              .withFilePath(outFile)
              .withCompression(compression)
              .withDataBlockEncoder(dataBlockEncoder)
              .withBloomType(bloomFilterType)
              .withChecksumType(HStore.getChecksumType(conf))
              .withBytesPerChecksum(HStore.getBytesPerChecksum(conf))
              .build();
      HFileScanner scanner = halfReader.getScanner(false, false, false);
      scanner.seekTo();
      do {
        KeyValue kv = scanner.getKeyValue();
        halfWriter.append(kv);
      } while (scanner.next());

      for (Map.Entry<byte[],byte[]> entry : fileInfo.entrySet()) {
        if (shouldCopyHFileMetaKey(entry.getKey())) {
          halfWriter.appendFileInfo(entry.getKey(), entry.getValue());
        }
      }
    } finally {
      if (halfWriter != null) halfWriter.close();
      if (halfReader != null) halfReader.close(cacheConf.shouldEvictOnClose());
    }
  }

Examples of org.apache.hadoop.hbase.io.HalfStoreFileReader
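This open path also tracks HDFS block locality: for a reference it computes the block distribution of just the referenced half before returning a HalfStoreFileReader, and for a normal file it computes the distribution of the whole file and returns a regular StoreFile.Reader.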

      status = fileStatus;
    }
    long length = status.getLen();
    if (this.reference != null) {
      hdfsBlocksDistribution = computeRefFileHDFSBlockDistribution(fs, reference, status);
      return new HalfStoreFileReader(
          fs, this.getPath(), in, length, cacheConf, reference);
    } else {
      hdfsBlocksDistribution = FSUtils.computeHDFSBlocksDistribution(fs, status, 0, length);
      return new StoreFile.Reader(fs, this.getPath(), in, length, cacheConf);
    }

Examples of org.apache.hadoop.hbase.io.HalfStoreFileReader
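A secondary-index variant of the open logic: when hbase.use.secondary.index is enabled and the reference belongs to an index region, the reader class named by hbase.index.half.storefile.reader.class is loaded and instantiated reflectively; in every other case the stock HalfStoreFileReader is used.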

    if (this.reader != null) {
      throw new IllegalAccessError("Already open");
    }
    if (isReference()) {
      if (this.link != null) {
        this.reader = new HalfStoreFileReader(this.fs, this.referencePath, this.link,
          this.cacheConf, this.reference, dataBlockEncoder.getEncodingInCache());
      } else {
        if (conf.getBoolean("hbase.use.secondary.index", false) && isIndexRegionReference()) {
          String indexHalfStoreFileClass = conf.get("hbase.index.half.storefile.reader.class");
          if (indexHalfStoreFileClass == null) {
            throw new RuntimeException(
                "Class for index half store files should be present.  Configure the property hbase.index.half.storefile.reader.class");
          }
          try {
            Class<?> indexHalfStoreReader = Class.forName(indexHalfStoreFileClass.trim());
            Constructor<?> constructor =
                indexHalfStoreReader.getConstructor(FileSystem.class, Path.class,
                  CacheConfig.class, Reference.class, DataBlockEncoding.class);
            this.reader =
                (Reader) constructor.newInstance(this.fs, this.referencePath, this.cacheConf,
                  this.reference, dataBlockEncoder.getEncodingInCache());
          } catch (Throwable e) {
            LOG.error("Error while initializing/invoking constructor of IndexHalfStoreFileReader.",
              e);
            throw new RuntimeException(
                "Error while initializing/invoking constructor of IndexHalfStoreFileReader.");
          }
        } else {
          this.reader =
              new HalfStoreFileReader(this.fs, this.referencePath, this.cacheConf, this.reference,
                  dataBlockEncoder.getEncodingInCache());
        }
      }
    } else if (isLink()) {
      long size = link.getFileStatus(fs).getLen();
      // ... (remainder of the method elided)

Examples of org.apache.hadoop.hbase.io.HalfStoreFileReader
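The same copy helper again, formatted with chained builder calls; functionally it matches the Store-based variant above.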

  /**
   * Copy half of an HFile into a new HFile.
   */
  private static void copyHFileHalf(Configuration conf, Path inFile, Path outFile,
      Reference reference, HColumnDescriptor familyDescriptor) throws IOException {
    FileSystem fs = inFile.getFileSystem(conf);
    CacheConfig cacheConf = new CacheConfig(conf);
    HalfStoreFileReader halfReader = null;
    StoreFile.Writer halfWriter = null;
    HFileDataBlockEncoder dataBlockEncoder =
        new HFileDataBlockEncoderImpl(familyDescriptor.getDataBlockEncodingOnDisk(),
            familyDescriptor.getDataBlockEncoding());
    try {
      halfReader =
          new HalfStoreFileReader(fs, inFile, cacheConf, reference, DataBlockEncoding.NONE);
      Map<byte[], byte[]> fileInfo = halfReader.loadFileInfo();

      int blocksize = familyDescriptor.getBlocksize();
      Algorithm compression = familyDescriptor.getCompression();
      BloomType bloomFilterType = familyDescriptor.getBloomFilterType();

      halfWriter =
          new StoreFile.WriterBuilder(conf, cacheConf, fs, blocksize).withFilePath(outFile)
              .withCompression(compression).withDataBlockEncoder(dataBlockEncoder)
              .withBloomType(bloomFilterType).withChecksumType(Store.getChecksumType(conf))
              .withBytesPerChecksum(Store.getBytesPerChecksum(conf)).build();
      HFileScanner scanner = halfReader.getScanner(false, false, false);
      scanner.seekTo();
      do {
        KeyValue kv = scanner.getKeyValue();
        halfWriter.append(kv);
      } while (scanner.next());

      for (Map.Entry<byte[], byte[]> entry : fileInfo.entrySet()) {
        if (shouldCopyHFileMetaKey(entry.getKey())) {
          halfWriter.appendFileInfo(entry.getKey(), entry.getValue());
        }
      }
    } finally {
      if (halfWriter != null) halfWriter.close();
      if (halfReader != null) halfReader.close(cacheConf.shouldEvictOnClose());
    }
  }

Examples of org.apache.hadoop.hbase.io.HalfStoreFileReader
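The HFileContext-based routine once more; unlike the earlier HFileContext example, this one appends the scanner's KeyValue directly rather than converting through KeyValueUtil.ensureKeyValue().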

  private static void copyHFileHalf(
      Configuration conf, Path inFile, Path outFile, Reference reference,
      HColumnDescriptor familyDescriptor)
  throws IOException {
    FileSystem fs = inFile.getFileSystem(conf);
    CacheConfig cacheConf = new CacheConfig(conf);
    HalfStoreFileReader halfReader = null;
    StoreFile.Writer halfWriter = null;
    try {
      halfReader = new HalfStoreFileReader(fs, inFile, cacheConf, reference, conf);
      Map<byte[], byte[]> fileInfo = halfReader.loadFileInfo();

      int blocksize = familyDescriptor.getBlocksize();
      Algorithm compression = familyDescriptor.getCompression();
      BloomType bloomFilterType = familyDescriptor.getBloomFilterType();
      HFileContext hFileContext = new HFileContextBuilder()
                                  .withCompression(compression)
                                  .withChecksumType(HStore.getChecksumType(conf))
                                  .withBytesPerCheckSum(HStore.getBytesPerChecksum(conf))
                                  .withBlockSize(blocksize)
                                  .withDataBlockEncoding(familyDescriptor.getDataBlockEncoding())
                                  .build();
      halfWriter = new StoreFile.WriterBuilder(conf, cacheConf,
          fs)
              .withFilePath(outFile)
              .withBloomType(bloomFilterType)
              .withFileContext(hFileContext)
              .build();
      HFileScanner scanner = halfReader.getScanner(false, false, false);
      scanner.seekTo();
      do {
        KeyValue kv = scanner.getKeyValue();
        halfWriter.append(kv);
      } while (scanner.next());

      for (Map.Entry<byte[],byte[]> entry : fileInfo.entrySet()) {
        if (shouldCopyHFileMetaKey(entry.getKey())) {
          halfWriter.appendFileInfo(entry.getKey(), entry.getValue());
        }
      }
    } finally {
      if (halfWriter != null) halfWriter.close();
      if (halfReader != null) halfReader.close(cacheConf.shouldEvictOnClose());
    }
  }