Package org.apache.hadoop.hive.ql.io

Examples of org.apache.hadoop.hive.ql.io.RCFileRecordReader$RCFileSyncEntry


    if (length < 0 || (start + length) > fileLen) {
      // Clamp the split so it ends at the end of the file.
      length = fileLen - start;
    }

    // Share the read path with RCFileRecordReader.
    FileSplit split = new FileSplit(fileName, start, length, new JobConf(conf));
    RCFileRecordReader recordReader = new RCFileRecordReader(conf, split);
    LongWritable key = new LongWritable();
    BytesRefArrayWritable value = new BytesRefArrayWritable();
    StringBuilder buf = new StringBuilder(STRING_BUFFER_SIZE); // extra capacity in case we overrun, to avoid resizing
    while (recordReader.next(key, value)) {
      printRecord(value, buf);
      recordCount++;
      if (verbose && (recordCount % RECORD_PRINT_INTERVAL) == 0) {
        long now = System.currentTimeMillis();
        System.err.println("Read " + recordCount/1024 + "k records");
        System.err.println("Read " + ((recordReader.getPos() / (1024L*1024L)))
                                                                      + "MB");
        System.err.printf("Input scan rate %.2f MB/s\n",
                  (recordReader.getPos() * 1.0 / (now - startT)) / 1024.0);
      }
      if (buf.length() > STRING_BUFFER_FLUSH_SIZE) {
        System.out.print(buf.toString());
        buf.setLength(0);
      }
    }
    // Flush whatever is still buffered once the file is exhausted.
    System.out.print(buf.toString());
    System.out.flush();
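For context, here is a minimal, self-contained sketch of driving RCFileRecordReader over a whole file, in the spirit of the loop above. It only counts rows; the class name RCFileRowCount and the command-line path are illustrative, and it assumes the input file was written as an RCFile.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hive.ql.io.RCFileRecordReader;
    import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.mapred.FileSplit;
    import org.apache.hadoop.mapred.JobConf;

    public class RCFileRowCount {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Path file = new Path(args[0]); // path to an existing RCFile (illustrative)
        long fileLen = file.getFileSystem(conf).getFileStatus(file).getLen();

        // A single split covering the whole file; for a nonzero start offset
        // the reader would first seek to the next sync marker.
        FileSplit split = new FileSplit(file, 0, fileLen, new JobConf(conf));
        RCFileRecordReader recordReader = new RCFileRecordReader(conf, split);

        LongWritable key = new LongWritable();                     // reused each call
        BytesRefArrayWritable value = new BytesRefArrayWritable(); // one row's columns
        long rows = 0;
        try {
          while (recordReader.next(key, value)) {
            rows++;
          }
        } finally {
          recordReader.close();
        }
        System.out.println("rows=" + rows);
      }
    }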


    if (length < 0 || (start + length) > fileLen) {
      // Clamp the split so it ends at the end of the file.
      length = fileLen - start;
    }

    // Share the read path with RCFileRecordReader.
    FileSplit split = new FileSplit(fileName, start, length, new JobConf(conf));
    RCFileRecordReader recordReader = new RCFileRecordReader(conf, split);

    if (columnSizes || fileSizes) {
      // Per-column compressed/uncompressed byte totals, allocated lazily once
      // the column count is known from the first block
      long[] compressedColumnSizes = null;
      long[] uncompressedColumnSizes = null;
      // Whole-file totals and row count
      long rowNo = 0;
      long uncompressedFileSize = 0;
      long compressedFileSize = 0;
      // Skip from block to block, since only the block headers are needed
      while (recordReader.nextBlock()) {
        // Get the sizes from the key buffer and aggregate
        KeyBuffer keyBuffer = recordReader.getKeyBuffer();
        if (uncompressedColumnSizes == null) {
          uncompressedColumnSizes = new long[keyBuffer.getColumnNumber()];
        }
        if (compressedColumnSizes == null) {
          compressedColumnSizes = new long[keyBuffer.getColumnNumber()];
        }
        for (int i = 0; i < keyBuffer.getColumnNumber(); i++) {
          uncompressedColumnSizes[i] += keyBuffer.getEachColumnUncompressedValueLen()[i];
          compressedColumnSizes[i] += keyBuffer.getEachColumnValueLen()[i];
        }
        rowNo += keyBuffer.getNumberRows();
      }

      if (columnSizes && uncompressedColumnSizes != null && compressedColumnSizes != null) {
        // Print out the sizes, if pretty is set, print it out in a human friendly format,
        // otherwise print it out as if it were a row
        for (int i = 0; i < uncompressedColumnSizes.length; i++) {
          if (pretty) {
            System.out.println("Column " + i + ": Uncompressed size: " +
                uncompressedColumnSizes[i] + " Compressed size: " + compressedColumnSizes[i]);
          } else {
            System.out.print(i + TAB + uncompressedColumnSizes[i] + TAB +
                compressedColumnSizes[i] + NEWLINE);
          }
        }
      }

      if (fileSizes) {
        if (uncompressedColumnSizes != null && compressedColumnSizes != null) {
          for (int i = 0; i < uncompressedColumnSizes.length; i++) {
            uncompressedFileSize += uncompressedColumnSizes[i];
            compressedFileSize += compressedColumnSizes[i];
          }
        }
        System.out.print("File size (uncompressed): " + uncompressedFileSize
            + ". File size (compressed): " + compressedFileSize + ". Number of rows: " + rowNo
            + "." + NEWLINE);
      }

      System.out.flush();
      return 0;
    }

    LongWritable key = new LongWritable();
    BytesRefArrayWritable value = new BytesRefArrayWritable();
    StringBuilder buf = new StringBuilder(STRING_BUFFER_SIZE); // extra capacity in case we overrun, to avoid resizing
    while (recordReader.next(key, value)) {
      printRecord(value, buf);
      recordCount++;
      if (verbose && (recordCount % RECORD_PRINT_INTERVAL) == 0) {
        long now = System.currentTimeMillis();
        System.err.println("Read " + recordCount/1024 + "k records");
        System.err.println("Read " + ((recordReader.getPos() / (1024L*1024L)))
                                                                      + "MB");
        System.err.printf("Input scan rate %.2f MB/s\n",
                  (recordReader.getPos() * 1.0 / (now - startT)) / 1024.0);
      }
      if (buf.length() > STRING_BUFFER_FLUSH_SIZE) {
        System.out.print(buf.toString());
        buf.setLength(0);
      }
    }
    // Flush whatever is still buffered once the file is exhausted.
    System.out.print(buf.toString());
    System.out.flush();
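Because nextBlock() consumes only each block's key section, where the per-column lengths are recorded, a size report like the one above reads far less data than a full record scan. Under the same setup, a stripped-down sketch that keeps only whole-file totals and derives an overall compression ratio might look like this (variable names are illustrative):

    // Sketch: whole-file totals from block headers only. Assumes recordReader
    // was constructed over the entire file, as above.
    long compressedTotal = 0;
    long uncompressedTotal = 0;
    long rows = 0;
    while (recordReader.nextBlock()) {
      KeyBuffer kb = recordReader.getKeyBuffer();
      int[] valueLens = kb.getEachColumnValueLen();           // compressed bytes
      int[] rawLens = kb.getEachColumnUncompressedValueLen(); // raw bytes
      for (int i = 0; i < kb.getColumnNumber(); i++) {
        compressedTotal += valueLens[i];
        uncompressedTotal += rawLens[i];
      }
      rows += kb.getNumberRows();
    }
    System.out.printf("rows=%d uncompressed=%d compressed=%d ratio=%.2f%n",
        rows, uncompressedTotal, compressedTotal,
        compressedTotal == 0 ? 0.0 : (double) uncompressedTotal / compressedTotal);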

    if (length < 0 || (start + length) > fileLen) {
      // Clamp the split so it ends at the end of the file.
      length = fileLen - start;
    }

    // Share the read path with RCFileRecordReader.
    FileSplit split = new FileSplit(fileName, start, length, new JobConf(conf));
    RCFileRecordReader recordReader = new RCFileRecordReader(conf, split);
    LongWritable key = new LongWritable();
    BytesRefArrayWritable value = new BytesRefArrayWritable();
    Text txt = new Text(); // reused across columns to avoid allocating a new writable per value
    while (recordReader.next(key, value)) {
      txt.clear();
      for (int i = 0; i < value.size(); i++) {
        BytesRefWritable v = value.get(i);
        txt.set(v.getData(), v.getStart(), v.getLength());
        System.out.print(txt.toString());
      }
      System.out.println(); // assumption: newline after each row
    }
    recordReader.close();
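Note that the Text instance is deliberately reused: each txt.set(...) copies the column's bytes into the same growable buffer, so no new writable is allocated per value, and toString() performs the UTF-8 decode. A small hypothetical helper in the same spirit, not part of the original tool, that decodes one row into a String array:

    // Hypothetical helper (not from the original source): decode one row's
    // columns into Strings, reusing a single scratch Text across columns.
    static String[] decodeRow(BytesRefArrayWritable row, Text scratch)
        throws IOException {
      String[] cols = new String[row.size()];
      for (int i = 0; i < row.size(); i++) {
        BytesRefWritable v = row.get(i);
        scratch.set(v.getData(), v.getStart(), v.getLength());
        cols[i] = scratch.toString();
      }
      return cols;
    }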
