Package: org.apache.hadoop.hive.ql.io.RCFile

Examples of org.apache.hadoop.hive.ql.io.RCFile.KeyBuffer


      // Running totals for the whole file, aggregated over all row-group blocks.
      long uncompressedFileSize = 0;
      long compressedFileSize = 0;
      // Skip from block to block since we only need the header
      while (recordReader.nextBlock()) {
        // Get the sizes from the key buffer and aggregate
        KeyBuffer keyBuffer = recordReader.getKeyBuffer();
        // Lazily size the per-column accumulators from the first block's column count;
        // NOTE(review): assumes every block reports the same column count — confirm.
        if (uncompressedColumnSizes == null) {
          uncompressedColumnSizes = new long[keyBuffer.getColumnNumber()];
        }
        if (compressedColumnSizes == null) {
          compressedColumnSizes = new long[keyBuffer.getColumnNumber()];
        }
        // Accumulate per-column uncompressed and compressed byte counts for this block.
        for (int i = 0; i < keyBuffer.getColumnNumber(); i++) {
          uncompressedColumnSizes[i] += keyBuffer.getEachColumnUncompressedValueLen()[i];
          compressedColumnSizes[i] += keyBuffer.getEachColumnValueLen()[i];
        }
        // rowNo is declared outside this snippet — presumably the file-wide row count.
        rowNo += keyBuffer.getNumberRows();
      }

      if (columnSizes && uncompressedColumnSizes != null && compressedColumnSizes != null) {
        // Print out the sizes, if pretty is set, print it out in a human friendly format,
        // otherwise print it out as if it were a row
View Full Code Here


    try {
      //CombineHiveInputFormat is set in PartialScanTask.
      RCFileKeyBufferWrapper key = (RCFileKeyBufferWrapper) ((CombineHiveKey) k).getKey();

      // calculate rawdatasize
      KeyBuffer keyBuffer = key.getKeyBuffer();
      long[] uncompressedColumnSizes = new long[keyBuffer.getColumnNumber()];
      for (int i = 0; i < keyBuffer.getColumnNumber(); i++) {
        uncompressedColumnSizes[i] += keyBuffer.getEachColumnUncompressedValueLen()[i];
      }
      if (uncompressedColumnSizes != null) {
        for (int i = 0; i < uncompressedColumnSizes.length; i++) {
          uncompressedFileSize += uncompressedColumnSizes[i];
        }
      }

      // calculate no. of rows
      rowNo += keyBuffer.getNumberRows();
    } catch (Throwable e) {
      this.exception = true;
      close();
      throw new IOException(e);
    }
View Full Code Here

    try {
      //CombineHiveInputFormat is set in PartialScanTask.
      RCFileKeyBufferWrapper key = (RCFileKeyBufferWrapper) ((CombineHiveKey) k).getKey();

      // calculate rawdatasize
      KeyBuffer keyBuffer = key.getKeyBuffer();
      long[] uncompressedColumnSizes = new long[keyBuffer.getColumnNumber()];
      for (int i = 0; i < keyBuffer.getColumnNumber(); i++) {
        uncompressedColumnSizes[i] += keyBuffer.getEachColumnUncompressedValueLen()[i];
      }
      if (uncompressedColumnSizes != null) {
        for (int i = 0; i < uncompressedColumnSizes.length; i++) {
          uncompressedFileSize += uncompressedColumnSizes[i];
        }
      }

      // calculate no. of rows
      rowNo += keyBuffer.getNumberRows();
    } catch (Throwable e) {
      this.exception = true;
      close();
      throw new IOException(e);
    }
View Full Code Here

    try {
      //CombineHiveInputFormat is set in PartialScanTask.
      RCFileKeyBufferWrapper key = (RCFileKeyBufferWrapper) ((CombineHiveKey) k).getKey();

      // calculate rawdatasize
      KeyBuffer keyBuffer = key.getKeyBuffer();
      long[] uncompressedColumnSizes = new long[keyBuffer.getColumnNumber()];
      for (int i = 0; i < keyBuffer.getColumnNumber(); i++) {
        uncompressedColumnSizes[i] += keyBuffer.getEachColumnUncompressedValueLen()[i];
      }
      if (uncompressedColumnSizes != null) {
        for (int i = 0; i < uncompressedColumnSizes.length; i++) {
          uncompressedFileSize += uncompressedColumnSizes[i];
        }
      }

      // calculate no. of rows
      rowNo += keyBuffer.getNumberRows();
    } catch (Throwable e) {
      this.exception = true;
      close();
      throw new IOException(e);
    }
View Full Code Here

    try {
      //CombineHiveInputFormat is set in PartialScanTask.
      RCFileKeyBufferWrapper key = (RCFileKeyBufferWrapper) ((CombineHiveKey) k).getKey();

      // calculate rawdatasize
      KeyBuffer keyBuffer = key.getKeyBuffer();
      long[] uncompressedColumnSizes = new long[keyBuffer.getColumnNumber()];
      for (int i = 0; i < keyBuffer.getColumnNumber(); i++) {
        uncompressedColumnSizes[i] += keyBuffer.getEachColumnUncompressedValueLen()[i];
      }
      if (uncompressedColumnSizes != null) {
        for (int i = 0; i < uncompressedColumnSizes.length; i++) {
          uncompressedFileSize += uncompressedColumnSizes[i];
        }
      }

      // calculate no. of rows
      rowNo += keyBuffer.getNumberRows();
    } catch (Throwable e) {
      this.exception = true;
      close();
      throw new IOException(e);
    }
View Full Code Here

      // Running totals for the whole file, aggregated over all row-group blocks.
      long uncompressedFileSize = 0;
      long compressedFileSize = 0;
      // Skip from block to block since we only need the header
      while (recordReader.nextBlock()) {
        // Get the sizes from the key buffer and aggregate
        KeyBuffer keyBuffer = recordReader.getKeyBuffer();
        // Lazily size the per-column accumulators from the first block's column count;
        // NOTE(review): assumes every block reports the same column count — confirm.
        if (uncompressedColumnSizes == null) {
          uncompressedColumnSizes = new long[keyBuffer.getColumnNumber()];
        }
        if (compressedColumnSizes == null) {
          compressedColumnSizes = new long[keyBuffer.getColumnNumber()];
        }
        // Accumulate per-column uncompressed and compressed byte counts for this block.
        for (int i = 0; i < keyBuffer.getColumnNumber(); i++) {
          uncompressedColumnSizes[i] += keyBuffer.getEachColumnUncompressedValueLen()[i];
          compressedColumnSizes[i] += keyBuffer.getEachColumnValueLen()[i];
        }
        // rowNo is declared outside this snippet — presumably the file-wide row count.
        rowNo += keyBuffer.getNumberRows();
      }

      if (columnSizes && uncompressedColumnSizes != null && compressedColumnSizes != null) {
        // Print out the sizes, if pretty is set, print it out in a human friendly format,
        // otherwise print it out as if it were a row
View Full Code Here

TOP

Related Classes of org.apache.hadoop.hive.ql.io.RCFile.KeyBuffer

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., which is owned by Oracle Inc. Contact coftware#gmail.com.