Package: org.apache.hadoop.hive.ql.io

Usage examples of org.apache.hadoop.hive.ql.io.StatsProvidingRecordReader


                  table.getInputFormatClass(), jc);
              InputSplit dummySplit = new FileSplit(file.getPath(), 0, 0, new String[] { table
                  .getDataLocation().toString() });
              org.apache.hadoop.mapred.RecordReader<?, ?> recordReader = (org.apache.hadoop.mapred.RecordReader<?, ?>) inputFormat
                  .getRecordReader(dummySplit, jc, Reporter.NULL);
              StatsProvidingRecordReader statsRR;
              if (recordReader instanceof StatsProvidingRecordReader) {
                statsRR = (StatsProvidingRecordReader) recordReader;
                numRows += statsRR.getStats().getRowCount();
                rawDataSize += statsRR.getStats().getRawDataSize();
                fileSize += file.getLen();
                numFiles += 1;
                statsAvailable = true;
              }
            }
View Full Code Here


            InputFormat<?, ?> inputFormat = (InputFormat<?, ?>) ReflectionUtils.newInstance(
                partn.getInputFormatClass(), jc);
            InputSplit dummySplit = new FileSplit(file.getPath(), 0, 0,
                new String[] { partn.getLocation() });
            Object recordReader = inputFormat.getRecordReader(dummySplit, jc, Reporter.NULL);
            StatsProvidingRecordReader statsRR;
            if (recordReader instanceof StatsProvidingRecordReader) {
              statsRR = (StatsProvidingRecordReader) recordReader;
              rawDataSize += statsRR.getStats().getRawDataSize();
              numRows += statsRR.getStats().getRowCount();
              fileSize += file.getLen();
              numFiles += 1;
              statsAvailable = true;
            }
          }
View Full Code Here

                  table.getInputFormatClass(), jc);
              InputSplit dummySplit = new FileSplit(file.getPath(), 0, 0, new String[] { table
                  .getDataLocation().toString() });
              org.apache.hadoop.mapred.RecordReader<?, ?> recordReader = (org.apache.hadoop.mapred.RecordReader<?, ?>) inputFormat
                  .getRecordReader(dummySplit, jc, Reporter.NULL);
              StatsProvidingRecordReader statsRR;
              if (recordReader instanceof StatsProvidingRecordReader) {
                statsRR = (StatsProvidingRecordReader) recordReader;
                numRows += statsRR.getStats().getRowCount();
                rawDataSize += statsRR.getStats().getRawDataSize();
                fileSize += file.getLen();
                numFiles += 1;
                statsAvailable = true;
              }
              recordReader.close();
View Full Code Here

            InputSplit dummySplit = new FileSplit(file.getPath(), 0, 0,
                new String[] { partn.getLocation() });
            org.apache.hadoop.mapred.RecordReader<?, ?> recordReader =
                (org.apache.hadoop.mapred.RecordReader<?, ?>)
                inputFormat.getRecordReader(dummySplit, jc, Reporter.NULL);
            StatsProvidingRecordReader statsRR;
            if (recordReader instanceof StatsProvidingRecordReader) {
              statsRR = (StatsProvidingRecordReader) recordReader;
              rawDataSize += statsRR.getStats().getRawDataSize();
              numRows += statsRR.getStats().getRowCount();
              fileSize += file.getLen();
              numFiles += 1;
              statsAvailable = true;
            }
            recordReader.close();
View Full Code Here

                  table.getInputFormatClass(), jc);
              InputSplit dummySplit = new FileSplit(file.getPath(), 0, 0, new String[] { table
                  .getDataLocation().toString() });
              org.apache.hadoop.mapred.RecordReader<?, ?> recordReader = (org.apache.hadoop.mapred.RecordReader<?, ?>) inputFormat
                  .getRecordReader(dummySplit, jc, Reporter.NULL);
              StatsProvidingRecordReader statsRR;
              if (recordReader instanceof StatsProvidingRecordReader) {
                statsRR = (StatsProvidingRecordReader) recordReader;
                numRows += statsRR.getStats().getRowCount();
                rawDataSize += statsRR.getStats().getRawDataSize();
                fileSize += file.getLen();
                numFiles += 1;
                statsAvailable = true;
              }
            }
View Full Code Here

            InputFormat<?, ?> inputFormat = (InputFormat<?, ?>) ReflectionUtils.newInstance(
                partn.getInputFormatClass(), jc);
            InputSplit dummySplit = new FileSplit(file.getPath(), 0, 0,
                new String[] { partn.getLocation() });
            Object recordReader = inputFormat.getRecordReader(dummySplit, jc, Reporter.NULL);
            StatsProvidingRecordReader statsRR;
            if (recordReader instanceof StatsProvidingRecordReader) {
              statsRR = (StatsProvidingRecordReader) recordReader;
              rawDataSize += statsRR.getStats().getRawDataSize();
              numRows += statsRR.getStats().getRowCount();
              fileSize += file.getLen();
              numFiles += 1;
              statsAvailable = true;
            }
          }
View Full Code Here

TOP

Related Classes of org.apache.hadoop.hive.ql.io.StatsProvidingRecordReader

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle, Inc. Contact: coftware#gmail.com.