Examples of HFileBlockDefaultEncodingContext


Examples of org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext

      compressAlgo = compressionAlgorithm == null ? NONE : compressionAlgorithm;
      this.dataBlockEncoder = dataBlockEncoder != null
          ? dataBlockEncoder : NoOpDataBlockEncoder.INSTANCE;

      defaultBlockEncodingCtx =
          new HFileBlockDefaultEncodingContext(compressionAlgorithm,
              null, DUMMY_HEADER);
      dataBlockEncodingCtx =
        this.dataBlockEncoder.newOnDiskDataBlockEncodingContext(
            compressionAlgorithm, DUMMY_HEADER);
View Full Code Here

Examples of org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext

          HFileDataBlockEncoder dataBlockEncoder, boolean includesMemstoreTS,
          ChecksumType checksumType, int bytesPerChecksum) {
      this.dataBlockEncoder = dataBlockEncoder != null
          ? dataBlockEncoder : NoOpDataBlockEncoder.INSTANCE;
      defaultBlockEncodingCtx =
        new HFileBlockDefaultEncodingContext(compressionAlgorithm, null, HConstants.HFILEBLOCK_DUMMY_HEADER);
      dataBlockEncodingCtx =
        this.dataBlockEncoder.newDataBlockEncodingContext(
            compressionAlgorithm, HConstants.HFILEBLOCK_DUMMY_HEADER);

      if (bytesPerChecksum < HConstants.HFILEBLOCK_HEADER_SIZE) {
View Full Code Here

Examples of org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext

        HFileBlockDefaultEncodingContext.class.getName()))) {
      throw new IOException (this.getClass().getName() + " only accepts " +
          HFileBlockDefaultEncodingContext.class.getName() + ".");
    }

    HFileBlockDefaultEncodingContext defaultContext =
        (HFileBlockDefaultEncodingContext) encodeCtx;
    defaultContext.compressAfterEncodingWithBlockType(in.array(), blockType);
  }
View Full Code Here

Examples of org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext

  }

  @Override
  public HFileBlockEncodingContext newDataBlockEncodingContext(
      Algorithm compressionAlgorithm, byte[] dummyHeader) {
    return new HFileBlockDefaultEncodingContext(compressionAlgorithm,
        null, dummyHeader);
  }
View Full Code Here

Examples of org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext

    DataBlockEncoder encoder = encoding.getEncoder();
    if (encoder != null) {
      return encoder.newDataBlockEncodingContext(
        compressionAlgorithm, encoding, dummyHeader);
    }
    return new HFileBlockDefaultEncodingContext(
      compressionAlgorithm, null, dummyHeader);
  }
View Full Code Here

Examples of org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext

    if (blkEncodingCtx.getClass() != HFileBlockDefaultEncodingContext.class) {
      throw new IOException(this.getClass().getName() + " only accepts "
          + HFileBlockDefaultEncodingContext.class.getName() + " as the " + "encoding context.");
    }

    HFileBlockDefaultEncodingContext encodingCtx
        = (HFileBlockDefaultEncodingContext) blkEncodingCtx;
    encodingCtx.prepareEncoding();
    DataOutputStream dataOut = encodingCtx.getOutputStreamForEncoder();
    internalEncodeKeyValues(dataOut, in, includesMvccVersion);

    //do i need to check this, or will it always be DataBlockEncoding.PREFIX_TREE?
    if (encodingCtx.getDataBlockEncoding() != DataBlockEncoding.NONE) {
      encodingCtx.postEncoding(BlockType.ENCODED_DATA);
    } else {
      encodingCtx.postEncoding(BlockType.DATA);
    }
  }
View Full Code Here

Examples of org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext

    if(DataBlockEncoding.PREFIX_TREE != encoding){
      //i'm not sure why encoding is in the interface.  Each encoder implementation should probably
      //know its own encoding type.
      throw new IllegalArgumentException("only DataBlockEncoding.PREFIX_TREE supported");
    }
    return new HFileBlockDefaultEncodingContext(compressionAlgorithm, encoding, header);
  }
View Full Code Here

Examples of org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext

    assertEquals(headerSize, cacheBlock.getDummyHeaderForVersion().length);
  }

  private HFileBlock createBlockOnDisk(HFileBlock block) throws IOException {
    int size;
    HFileBlockEncodingContext context = new HFileBlockDefaultEncodingContext(
        Compression.Algorithm.NONE, blockEncoder.getDataBlockEncoding(),
        HConstants.HFILEBLOCK_DUMMY_HEADER);
    context.setDummyHeader(block.getDummyHeaderForVersion());
    blockEncoder.beforeWriteToDisk(block.getBufferWithoutHeader(),
            includesMemstoreTS, context, block.getBlockType());
    byte[] encodedBytes = context.getUncompressedBytesWithHeader();
    size = encodedBytes.length - block.getDummyHeaderForVersion().length;
    return new HFileBlock(context.getBlockType(), size, size, -1,
            ByteBuffer.wrap(encodedBytes), HFileBlock.FILL_HEADER, 0, includesMemstoreTS,
            block.getMinorVersion(), block.getBytesPerChecksum(), block.getChecksumType(),
            block.getOnDiskDataSizeWithHeader());
  }
View Full Code Here

Examples of org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext

      encoder.encodeKeyValues(rawBuf, includesMemstoreTS,
          encodingCtx);
      encodedResultWithHeader =
          encodingCtx.getUncompressedBytesWithHeader();
    } else {
      HFileBlockDefaultEncodingContext defaultEncodingCtx =
        new HFileBlockDefaultEncodingContext(algo, encoding, dummyHeader);
      byte[] rawBufWithHeader =
          new byte[rawBuf.array().length + headerLen];
      System.arraycopy(rawBuf.array(), 0, rawBufWithHeader,
          headerLen, rawBuf.array().length);
      defaultEncodingCtx.compressAfterEncodingWithBlockType(rawBufWithHeader,
          BlockType.DATA);
      encodedResultWithHeader =
        defaultEncodingCtx.getUncompressedBytesWithHeader();
    }
    final int encodedSize =
        encodedResultWithHeader.length - headerLen;
    if (encoder != null) {
      // We need to account for the two-byte encoding algorithm ID that
View Full Code Here

Examples of org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext

      compressAlgo = compressionAlgorithm == null ? NONE : compressionAlgorithm;
      this.dataBlockEncoder = dataBlockEncoder != null
          ? dataBlockEncoder : NoOpDataBlockEncoder.INSTANCE;

      defaultBlockEncodingCtx =
          new HFileBlockDefaultEncodingContext(compressionAlgorithm,
              null, DUMMY_HEADER);
      dataBlockEncodingCtx =
        this.dataBlockEncoder.newDataBlockEncodingContext(
            compressionAlgorithm, DUMMY_HEADER);
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.