Examples of TableSplit
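TableSplit is the InputSplit implementation that org.apache.hadoop.hbase.mapreduce.TableInputFormat hands to each map task: it carries a table name, a start row (inclusive), an end row (exclusive), and the location of the region covering that key range, so tasks can be scheduled close to their data. As a minimal, self-contained sketch of constructing and inspecting one (hypothetical names; assumes the older byte[]-based constructor that several snippets below rely on):

import org.apache.hadoop.hbase.mapreduce.TableSplit;
import org.apache.hadoop.hbase.util.Bytes;

public class TableSplitSketch {
  public static void main(String[] args) {
    // One region's row-key range plus the server that hosts it.
    TableSplit split = new TableSplit(
        Bytes.toBytes("example_table"),        // table name (hypothetical)
        Bytes.toBytes("row-0000"),             // start row, inclusive
        Bytes.toBytes("row-1000"),             // end row, exclusive
        "regionserver-1.example.com");         // region location hint (hypothetical)

    System.out.println(Bytes.toString(split.getStartRow()));  // row-0000
    System.out.println(Bytes.toString(split.getEndRow()));    // row-1000
    System.out.println(split.getRegionLocation());            // regionserver-1.example.com
  }
}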


Examples of org.apache.hadoop.hbase.mapreduce.TableSplit

    InputSplit split,
    JobConf jobConf,
    final Reporter reporter) throws IOException {

    // Hive wraps the underlying HBase TableSplit in its own HBaseSplit; unwrap it here
    HBaseSplit hbaseSplit = (HBaseSplit) split;
    TableSplit tableSplit = hbaseSplit.getSplit();
    String hbaseTableName = jobConf.get(HBaseSerDe.HBASE_TABLE_NAME);
    setHTable(new HTable(new HBaseConfiguration(jobConf), Bytes.toBytes(hbaseTableName)));
    String hbaseColumnsMapping = jobConf.get(HBaseSerDe.HBASE_COLUMNS_MAPPING);
    List<String> hbaseColumnFamilies = new ArrayList<String>();
    List<String> hbaseColumnQualifiers = new ArrayList<String>();
View Full Code Here

Examples of org.apache.hadoop.hbase.mapreduce.TableSplit

    // make it compare as the very next value after startRow
    byte [] stopRow = new byte[startRow.length + 1];
    System.arraycopy(startRow, 0, stopRow, 0, startRow.length);
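    // the appended byte stays 0x00, so stopRow is the smallest key strictly greater than startRow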

    if (tableSplit != null) {
      tableSplit = new TableSplit(
        tableSplit.getTableName(),
        startRow,
        stopRow,
        tableSplit.getRegionLocation());
    }
View Full Code Here

Examples of org.apache.hadoop.hbase.mapreduce.TableSplit

    @Override
    public WritableComparable<InputSplit> getSplitComparable(InputSplit split)
            throws IOException {
        return new WritableComparable<InputSplit>() {
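            // delegate serialization and ordering to an embedded TableSplit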
            TableSplit tsplit = new TableSplit();

            @Override
            public void readFields(DataInput in) throws IOException {
                tsplit.readFields(in);
            }

            @Override
            public void write(DataOutput out) throws IOException {
                tsplit.write(out);
            }

            @Override
            public int compareTo(InputSplit split) {
                return tsplit.compareTo((TableSplit) split);
            }
        };
    }
View Full Code Here

Examples of org.apache.hadoop.hbase.mapreduce.TableSplit

    public List<InputSplit> getSplits(org.apache.hadoop.mapreduce.JobContext context)
    throws IOException {
        List<InputSplit> splits = super.getSplits(context);
        ListIterator<InputSplit> splitIter = splits.listIterator();
        while (splitIter.hasNext()) {
            TableSplit split = (TableSplit) splitIter.next();
            byte[] startKey = split.getStartRow();
            byte[] endKey = split.getEndRow();
            // Skip if the region doesn't satisfy configured options.
            if ((skipRegion(CompareOp.LESS, startKey, lt_)) ||
                    (skipRegion(CompareOp.GREATER, endKey, gt_)) ||
                    (skipRegion(CompareOp.GREATER, endKey, gte_)) ||
                    (skipRegion(CompareOp.LESS_OR_EQUAL, startKey, lte_)) )  {
View Full Code Here

Examples of org.apache.hadoop.hbase.mapreduce.TableSplit

    @Override
    public WritableComparable<InputSplit> getSplitComparable(InputSplit split)
            throws IOException {
        return new WritableComparable<InputSplit>() {
            TableSplit tsplit = new TableSplit();

            @Override
            public void readFields(DataInput in) throws IOException {
                tsplit.readFields(in);
            }

            @Override
            public void write(DataOutput out) throws IOException {
                tsplit.write(out);
            }

            @Override
            public int compareTo(InputSplit split) {
                return tsplit.compareTo((TableSplit) split);
            }
        };
    }
View Full Code Here

Examples of org.apache.hadoop.hbase.mapreduce.TableSplit

    InputSplit split,
    JobConf jobConf,
    final Reporter reporter) throws IOException {

    HBaseSplit hbaseSplit = (HBaseSplit) split;
    TableSplit tableSplit = hbaseSplit.getSplit();
    String hbaseTableName = jobConf.get(HBaseSerDe.HBASE_TABLE_NAME);
    setHTable(new HTable(HBaseConfiguration.create(jobConf), Bytes.toBytes(hbaseTableName)));
    String hbaseColumnsMapping = jobConf.get(HBaseSerDe.HBASE_COLUMNS_MAPPING);
    List<Integer> readColIDs = ColumnProjectionUtils.getReadColumnIDs(jobConf);
    List<ColumnMapping> columnsMapping = null;
View Full Code Here

Examples of org.apache.hadoop.hbase.mapreduce.TableSplit

      } else {
        throw new IOException(comparisonOp + " is not a supported comparison operator");
      }
    }
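    // rebuild the split with the narrowed [startRow, stopRow) range computed from the comparison above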
    if (tableSplit != null) {
      tableSplit = new TableSplit(
        tableSplit.getTableName(),
        startRow,
        stopRow,
        tableSplit.getRegionLocation());
    }
View Full Code Here

Examples of org.apache.hadoop.hbase.mapreduce.TableSplit

    @Override
    public WritableComparable<InputSplit> getSplitComparable(InputSplit split)
            throws IOException {
        return new WritableComparable<InputSplit>() {
            TableSplit tsplit = new TableSplit();

            @Override
            public void readFields(DataInput in) throws IOException {
                tsplit.readFields(in);
            }

            @Override
            public void write(DataOutput out) throws IOException {
                tsplit.write(out);
            }

            @Override
            public int compareTo(InputSplit split) {
                return tsplit.compareTo((TableSplit) split);
            }
        };
    }
View Full Code Here

Examples of org.apache.hadoop.hbase.mapreduce.TableSplit

        scan.setTimeRange(startTime, endTime);
        if (versions >= 0) {
          scan.setMaxVersions(versions);
        }

        final TableSplit tableSplit = (TableSplit)(context.getInputSplit());
        HConnectionManager.execute(new HConnectable<Void>(conf) {
          @Override
          public Void connect(HConnection conn) throws IOException {
            String zkClusterKey = conf.get(NAME + ".peerQuorumAddress");
            Configuration peerConf = HBaseConfiguration.create(conf);
            ZKUtil.applyClusterKeyToConf(peerConf, zkClusterKey);

            TableName tableName = TableName.valueOf(conf.get(NAME + ".tableName"));
            // TODO: This HTable doesn't get closed. Fix!
            Table replicatedTable = new HTable(peerConf, tableName);
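            // scan the peer table over the same key range this split covers, starting at the current row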
            scan.setStartRow(value.getRow());
            scan.setStopRow(tableSplit.getEndRow());
            replicatedScanner = replicatedTable.getScanner(scan);
            return null;
          }
        });
        currentCompareRowInPeerTable = replicatedScanner.next();
View Full Code Here

Examples of org.apache.hadoop.hbase.mapreduce.TableSplit

            InterruptedException {

        context.progress();
        context.getCounter(HBaseIndexerCounters.INPUT_ROWS).increment(1L);
        try {
            TableSplit tableSplit;
            if (context.getInputSplit() instanceof TableSplit) {
                tableSplit = (TableSplit) context.getInputSplit();
                indexer.indexRowData(ImmutableList.<RowData>of(new ResultWrappingRowData(result,
                        tableSplit.getTableName())));
            } else {
                throw new IOException("Input split not of type " + TableSplit.class + " but " +
                        context.getInputSplit().getClass());
            }
View Full Code Here