Package org.apache.hadoop.hbase.thrift2.generated

Examples of org.apache.hadoop.hbase.thrift2.generated.THBaseService
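The fragments below come from the HBase thrift2 test and utility code and call a THBaseService handler in-process. A remote client reaches the same interface over Thrift; a minimal connection sketch, assuming a thrift2 server listening on the default port 9090:

    // imports: org.apache.thrift.transport.TSocket / TTransport,
    //          org.apache.thrift.protocol.TBinaryProtocol / TProtocol
    TTransport transport = new TSocket("localhost", 9090);
    transport.open();
    TProtocol protocol = new TBinaryProtocol(transport);
    THBaseService.Iface client = new THBaseService.Client(protocol);
    // ...issue the same put/get/scan calls shown below against "client"...
    transport.close();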


    handler.put(table, put);

    // Delete the column written above: one TColumn per family/qualifier pair.
    TDelete delete = new TDelete(wrap(rowName));
    List<TColumn> deleteColumns = new ArrayList<TColumn>();
    TColumn deleteColumn = new TColumn(wrap(familyAname));
    deleteColumn.setQualifier(qualifierAname);
    deleteColumns.add(deleteColumn);
    delete.setColumns(deleteColumns);

    handler.deleteSingle(table, delete);
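The fragment above starts after the TPut was built. A sketch of the missing setup, reusing the test's own fixtures (rowName, familyAname, qualifierAname and valueAname are byte[] constants; wrap is ByteBuffer.wrap):

    List<TColumnValue> columnValues = new ArrayList<TColumnValue>();
    columnValues.add(new TColumnValue(wrap(familyAname), wrap(qualifierAname), wrap(valueAname)));
    TPut put = new TPut(wrap(rowName), columnValues);
    handler.put(table, put);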


    TResult result = handler.get(table, get);
    assertEquals(2, result.getColumnValuesSize());

    TDelete delete = new TDelete(wrap(rowName));
    List<TColumn> deleteColumns = new ArrayList<TColumn>();
    TColumn deleteColumn = new TColumn(wrap(familyAname));
    deleteColumn.setQualifier(qualifierAname);
    deleteColumns.add(deleteColumn);
    delete.setColumns(deleteColumns);
    delete.setDeleteType(TDeleteType.DELETE_COLUMNS); // all versions of the column; this is the default anyway

    handler.deleteSingle(table, delete);

    TResult result = handler.get(table, get);
    assertEquals(2, result.getColumnValuesSize());

    TDelete delete = new TDelete(wrap(rowName));
    List<TColumn> deleteColumns = new ArrayList<TColumn>();
    TColumn deleteColumn = new TColumn(wrap(familyAname));
    deleteColumn.setQualifier(qualifierAname);
    deleteColumns.add(deleteColumn);
    delete.setColumns(deleteColumns);
    delete.setDeleteType(TDeleteType.DELETE_COLUMN); // only the latest version, unlike DELETE_COLUMNS above

    handler.deleteSingle(table, delete);
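With DELETE_COLUMN only the latest cell of the targeted column is removed, so a follow-up read should still see the remaining data. A hedged verification sketch, assuming the second column value written above is left untouched:

    TGet get = new TGet(wrap(rowName));
    TResult result = handler.get(table, get);
    // one of the two original column values should survive the delete
    assertEquals(1, result.getColumnValuesSize());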


    // create scan instance
    TScan scan = new TScan();
    List<TColumn> columns = new ArrayList<TColumn>();
    TColumn column = new TColumn();
    column.setFamily(familyAname);
    column.setQualifier(qualifierAname);
    columns.add(column);
    scan.setColumns(columns);
    scan.setStartRow("testScan".getBytes());
    scan.setStopRow("testScan\uffff".getBytes());
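The fragment stops before the scan is executed. Driving it through the scanner calls that THBaseService defines looks roughly like this:

    int scanId = handler.openScanner(table, scan);
    List<TResult> results = handler.getScannerRows(scanId, 10); // fetch up to 10 rows
    // ...inspect results...
    handler.closeScanner(scanId);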


    // create scan instance with filter
    TScan scan = new TScan();
    List<TColumn> columns = new ArrayList<TColumn>();
    TColumn column = new TColumn();
    column.setFamily(familyAname);
    column.setQualifier(qualifierAname);
    columns.add(column);
    scan.setColumns(columns);
    scan.setStartRow("testScanWithFilter".getBytes());
    scan.setStopRow("testScanWithFilter\uffff".getBytes());
    // only get the key part
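The trailing comment ("only get the key part") points at a key-only filter, which the thrift2 API expresses as a filter string. A sketch, assuming HBase's standard KeyOnlyFilter:

    scan.setFilterString(wrap("KeyOnlyFilter()".getBytes()));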

    handler.put(table, put);

    // create scan instance
    TScan scan = new TScan();
    List<TColumn> columns = new ArrayList<TColumn>();
    TColumn column = new TColumn();
    column.setFamily(familyAname);
    columns.add(column);
    scan.setColumns(columns);
    scan.setStartRow("testScanWithBatchSize".getBytes());
    scan.setStopRow("testScanWithBatchSize\uffff".getBytes());
    // set batch size to 10 columns per call
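The call matching the comment above would be the TScan batch size, assuming that field is available in this version of the generated TScan:

    scan.setBatchSize(10); // return at most 10 cells per scanner invocation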


    // create scan instance
    TScan scan = new TScan();
    List<TColumn> columns = new ArrayList<TColumn>();
    TColumn column = new TColumn();
    column.setFamily(familyAname);
    column.setQualifier(qualifierAname);
    columns.add(column);
    scan.setColumns(columns);
    scan.setStartRow("testGetScannerResults".getBytes());

    // get 5 rows and check the returned results
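Unlike the openScanner/getScannerRows flow, getScannerResults runs the whole scan in a single call. A sketch matching the comment above:

    List<TResult> results = handler.getScannerResults(table, scan, 5);
    assertEquals(5, results.size());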


    // Map<family, List<KeyValue>>: collapse each family's cells into one TColumn.
    // ("in" is the client-side Delete being converted; "out" is the TDelete under construction.)
    List<TColumn> columns = new ArrayList<TColumn>();
    for (Map.Entry<byte[], List<org.apache.hadoop.hbase.Cell>> familyEntry:
        in.getFamilyCellMap().entrySet()) {
      // The family is set once, from the map key.
      TColumn column = new TColumn(ByteBuffer.wrap(familyEntry.getKey()));
      for (org.apache.hadoop.hbase.Cell cell: familyEntry.getValue()) {
        KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
        byte[] qualifier = kv.getQualifier();
        long timestamp = kv.getTimestamp();
        if (qualifier != null) {
          column.setQualifier(qualifier);
        }
        if (timestamp != HConstants.LATEST_TIMESTAMP) {
          column.setTimestamp(timestamp);
        }
      }
      // Note: one TColumn per family, so the last cell's qualifier/timestamp wins.
      columns.add(column);
    }
    out.setColumns(columns);
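This loop appears to come from the thrift2 ThriftUtilities class, converting a client-side Delete into a TDelete. A hedged round-trip sketch; deleteFromHBase is the method this loop is assumed to live in, and the Delete API shown matches the KeyValueUtil era of HBase:

    Delete delete = new Delete(Bytes.toBytes("row1"));
    delete.deleteColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"));
    TDelete tDelete = ThriftUtilities.deleteFromHBase(delete);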

    TPut putB = new TPut(wrap(rowName), columnValuesB); // the constructor already sets columnValues

    TDelete delete = new TDelete(wrap(rowName));
    List<TColumn> deleteColumns = new ArrayList<TColumn>();
    TColumn deleteColumn = new TColumn(wrap(familyAname));
    deleteColumn.setQualifier(qualifierAname);
    deleteColumns.add(deleteColumn);
    delete.setColumns(deleteColumns);

    List<TMutation> mutations = new ArrayList<TMutation>();
    TMutation mutationA = TMutation.put(putB);
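The fragment ends after wrapping the put into a TMutation. Completing the row-mutation flow in the same pattern:

    TMutation mutationB = TMutation.deleteSingle(delete);
    mutations.add(mutationA);
    mutations.add(mutationB);

    TRowMutations tRowMutations = new TRowMutations(wrap(rowName), mutations);
    handler.mutateRow(table, tRowMutations);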

