Package org.apache.hadoop.hbase.regionserver.compactions

Examples of org.apache.hadoop.hbase.regionserver.compactions.StripeCompactor


    // Stripe store engine wiring: build the stripe config, compaction policy, file
    // manager, flusher, and the StripeCompactor itself from the store's configuration.
    this.config = new StripeStoreConfig(conf, store);
    this.compactionPolicy = new StripeCompactionPolicy(conf, store, config);
    this.storeFileManager = new StripeStoreFileManager(comparator, conf, this.config);
    this.storeFlusher = new StripeStoreFlusher(
      conf, store, this.compactionPolicy, this.storeFileManager);
    this.compactor = new StripeCompactor(conf, store);
  }
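
The wiring above comes from the stripe store engine's component creation. The sketch below is illustrative only (not part of the snippet); it assumes the 0.98-era configuration keys that appear elsewhere on this page and shows the configuration that selects that engine and, with it, StripeCompactor:

    // Illustrative configuration sketch: selecting the stripe store engine is what
    // makes a store construct a StripeCompactor as in the wiring above.
    Configuration conf = HBaseConfiguration.create();
    conf.set(StoreEngine.STORE_ENGINE_CLASS_KEY,
        "org.apache.hadoop.hbase.regionserver.StripeStoreEngine");
    // Stripe tuning keys reused from the test snippet further down this page.
    conf.setInt(StripeStoreConfig.INITIAL_STRIPE_COUNT_KEY, 2);
    conf.setInt(StripeStoreConfig.MIN_FILES_L0_KEY, 2);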


  // Test helper: runs a boundary-driven stripe compaction over the input cells and
  // checks the resulting files against the expected per-stripe output.
  public static void verifyBoundaryCompaction(KeyValue[] input, byte[][] boundaries,
      KeyValue[][] output, byte[] majorFrom, byte[] majorTo, boolean allFiles)
          throws Exception {
    StoreFileWritersCapture writers = new StoreFileWritersCapture();
    StripeCompactor sc = createCompactor(writers, input);
    List<Path> paths =
        sc.compact(createDummyRequest(), Arrays.asList(boundaries), majorFrom, majorTo);
    writers.verifyKvs(output, allFiles, true);
    if (allFiles) {
      assertEquals(output.length, paths.size());
      writers.verifyBoundaries(boundaries);
    }
  }
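
A minimal usage sketch for the helper above, with illustrative values that are not taken from the original test: three rows compacted against a single boundary "c" are expected to land in two files, one per stripe.

    byte[] fam = Bytes.toBytes("f"), col = Bytes.toBytes("q"), val = Bytes.toBytes("v");
    KeyValue a = new KeyValue(Bytes.toBytes("a"), fam, col, 1L, val);
    KeyValue b = new KeyValue(Bytes.toBytes("b"), fam, col, 1L, val);
    KeyValue c = new KeyValue(Bytes.toBytes("c"), fam, col, 1L, val);
    // Boundaries are open-ended on both sides; empty byte arrays mark the open ends.
    byte[][] boundaries = new byte[][] {
        HConstants.EMPTY_BYTE_ARRAY, Bytes.toBytes("c"), HConstants.EMPTY_BYTE_ARRAY };
    verifyBoundaryCompaction(
        new KeyValue[] { a, b, c },            // input cells
        boundaries,                            // stripes: [open, "c") and ["c", open)
        new KeyValue[][] { { a, b }, { c } },  // expected cells per output file
        null, null,                            // no major-range drop-deletes
        true);                                 // expect a file for every stripe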

  // Test helper: runs a size/count-driven stripe compaction over the [left, right)
  // range and checks the resulting files against the expected output.
  public static void verifySizeCompaction(KeyValue[] input, int targetCount, long targetSize,
      byte[] left, byte[] right, KeyValue[][] output) throws Exception {
    StoreFileWritersCapture writers = new StoreFileWritersCapture();
    StripeCompactor sc = createCompactor(writers, input);
    List<Path> paths = sc.compact(
        createDummyRequest(), targetCount, targetSize, left, right, null, null);
    assertEquals(output.length, paths.size());
    writers.verifyKvs(output, true, true);
    List<byte[]> boundaries = new ArrayList<byte[]>();
    boundaries.add(left);
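
A usage sketch for the (truncated) size-based helper above, again with illustrative values: with a target count of 1, an effectively unlimited target size, and an open-ended range, all input cells should end up in a single output file.

    // Degenerate but safe case: one writer absorbs everything, so exactly one path
    // comes back and it contains all of the input cells in order.
    verifySizeCompaction(
        new KeyValue[] { a, b, c },            // same cells as in the sketch above
        1,                                     // targetCount: a single output file
        Long.MAX_VALUE,                        // targetSize: never forces a split
        HConstants.EMPTY_BYTE_ARRAY,           // left boundary (open)
        HConstants.EMPTY_BYTE_ARRAY,           // right boundary (open)
        new KeyValue[][] { { a, b, c } });     // expected: everything in one file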

    // Build a mocked Store whose temp-file writers are captured by the test, then
    // return a StripeCompactor whose scanner is pinned to a pre-built scanner over
    // the test data, so only the compactor's writing and boundary logic is exercised.
    when(store.getRegionInfo()).thenReturn(new HRegionInfo(TABLE_NAME));
    when(store.createWriterInTmp(anyLong(), any(Compression.Algorithm.class),
        anyBoolean(), anyBoolean(), anyBoolean())).thenAnswer(writers);
    when(store.getComparator()).thenReturn(new KVComparator());

    return new StripeCompactor(conf, store) {
      @Override
      protected InternalScanner createScanner(Store store, List<StoreFileScanner> scanners,
          long smallestReadPoint, long earliestPutTs, byte[] dropDeletesFromRow,
          byte[] dropDeletesToRow) throws IOException {
        return scanner;

    // Configure a two-stripe store engine whose StripeCompactor is replaced by a mock,
    // so the test can observe which compaction calls the policy issues.
    int targetCount = 2;
    conf.setInt(StripeStoreConfig.INITIAL_STRIPE_COUNT_KEY, targetCount);
    conf.setInt(StripeStoreConfig.MIN_FILES_L0_KEY, 2);
    conf.set(StoreEngine.STORE_ENGINE_CLASS_KEY, TestStoreEngine.class.getName());
    TestStoreEngine se = createEngine(conf);
    StripeCompactor mockCompactor = mock(StripeCompactor.class);
    se.setCompactorOverride(mockCompactor);
    when(mockCompactor.compact(any(CompactionRequest.class), anyInt(), anyLong(),
        any(byte[].class), any(byte[].class), any(byte[].class), any(byte[].class)))
        .thenReturn(new ArrayList<Path>());

    // Produce 3 L0 files.
    StoreFile sf = createFile();
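
An illustrative follow-up, not taken from the original test: once the engine's policy has selected and executed a compaction, the mocked compactor can be checked to confirm it received the size-based call, for example:

    // Mockito verification against the same signature that was stubbed above.
    verify(mockCompactor).compact(any(CompactionRequest.class), anyInt(), anyLong(),
        any(byte[].class), any(byte[].class), any(byte[].class), any(byte[].class));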