Package org.apache.hadoop.hbase.util

Examples of org.apache.hadoop.hbase.util.EnvironmentEdge
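EnvironmentEdge abstracts the clock that HBase reads, so tests can inject a deterministic time source instead of relying on System.currentTimeMillis(). The examples below all follow the same pattern: build an anonymous EnvironmentEdge that returns a fixed timestamp, install it through EnvironmentEdgeManager (or EnvironmentEdgeManagerTestHelper in tests), and restore the original edge afterwards. Note that these snippets use the older currentTimeMillis() method; newer HBase releases renamed it to currentTime(). A minimal, self-contained sketch of the pattern (the class name FixedClockExample is ours, not from the snippets):

import org.apache.hadoop.hbase.util.EnvironmentEdge;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;

public class FixedClockExample {
  public static void main(String[] args) {
    final long frozen = System.currentTimeMillis();
    // An edge that always reports the same instant.
    EnvironmentEdge fixed = new EnvironmentEdge() {
      @Override
      public long currentTimeMillis() {
        return frozen;
      }
    };
    EnvironmentEdgeManager.injectEdge(fixed);
    try {
      // All code that asks the manager for the time now sees the frozen
      // clock, which makes TTL and timestamp tests deterministic.
      assert EnvironmentEdgeManager.currentTimeMillis() == frozen;
    } finally {
      EnvironmentEdgeManager.reset(); // restore the default edge
    }
  }
}

Restoring the edge in a finally block matters because the delegate is global; a leaked fake clock would poison later tests.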



  @Test(timeout = 60 * 1000)
  public void testHFileCleaning() throws Exception {
    final EnvironmentEdge originalEdge = EnvironmentEdgeManager.getDelegate();
    String prefix = "someHFileThatWouldBeAUUID";
    Configuration conf = UTIL.getConfiguration();
    // set TTL
    long ttl = 2000;
    conf.set(HFileCleaner.MASTER_HFILE_CLEANER_PLUGINS,
      "org.apache.hadoop.hbase.master.cleaner.TimeToLiveHFileCleaner");
    conf.setLong(TimeToLiveHFileCleaner.TTL_CONF_KEY, ttl);
    Server server = new DummyServer();
    Path archivedHfileDir = new Path(UTIL.getDataTestDir(), HConstants.HFILE_ARCHIVE_DIRECTORY);
    FileSystem fs = FileSystem.get(conf);
    HFileCleaner cleaner = new HFileCleaner(1000, server, conf, fs, archivedHfileDir);

    // Create 1 invalid file, 31 old files, and 1 very new file
    final long createTime = System.currentTimeMillis();
    fs.delete(archivedHfileDir, true);
    fs.mkdirs(archivedHfileDir);
    // Case 1: 1 invalid file, which should be deleted directly
    fs.createNewFile(new Path(archivedHfileDir, "dfd-dfd"));
    LOG.debug("Now is: " + createTime);
    for (int i = 1; i < 32; i++) {
      // Case 3: old files, which should be deletable by the first cleaner
      // in the chain (TimeToLiveHFileCleaner)
      Path fileName = new Path(archivedHfileDir, (prefix + "." + (createTime + i)));
      fs.createNewFile(fileName);
      // set the modification time past the TTL to ensure the file gets removed
      fs.setTimes(fileName, createTime - ttl - 1, -1);
      LOG.debug("Creating " + getFileStats(fileName, fs));
    }

    // Case 2: 1 newer file, not deletable by the first cleaner
    // (TimeToLiveHFileCleaner), so we do not go down the chain
    Path saved = new Path(archivedHfileDir, "thisFileShouldBeSaved.00000000000");
    fs.createNewFile(saved);
    // set the modification time in the future, so it is definitely within the TTL
    fs.setTimes(saved, createTime + (ttl * 2), -1);
    LOG.debug("Creating " + getFileStats(saved, fs));

    assertEquals(33, fs.listStatus(archivedHfileDir).length);

    // set a custom edge manager to handle time checking
    EnvironmentEdge setTime = new EnvironmentEdge() {
      @Override
      public long currentTimeMillis() {
        return createTime;
      }
    };
View Full Code Here
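The snippet is cut off at this point; the remainder of testHFileCleaning presumably injects setTime, runs the cleaner, and restores the original edge. A hedged sketch of that tail, assuming the surviving-file count implied by the setup above:

    EnvironmentEdgeManager.injectEdge(setTime);   // freeze "now" at createTime
    try {
      cleaner.chore();                            // one cleaning pass
      // the invalid file and the 31 expired files should be gone, leaving
      // only the file whose timestamp is still within the TTL
      assertEquals(1, fs.listStatus(archivedHfileDir).length);
    } finally {
      EnvironmentEdgeManager.injectEdge(originalEdge); // restore the real clock
    }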



  public void testDeleteMarkerLongevity() throws Exception {
    try {
      final long now = System.currentTimeMillis();
      EnvironmentEdgeManagerTestHelper.injectEdge(new EnvironmentEdge() {
        @Override
        public long currentTimeMillis() {
          return now;
        }
      });
      KeyValue[] kvs = new KeyValue[]{
View Full Code Here
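The KeyValue array is truncated above. With the edge frozen at now, the test can build cells at exact offsets from the fake clock; a hedged sketch of what such entries might look like (row, family, and qualifier names are ours), plus the cleanup the helper expects:

      KeyValue put = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"),
          Bytes.toBytes("q"), now - 1000, KeyValue.Type.Put);
      KeyValue deleteMarker = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"),
          Bytes.toBytes("q"), now, KeyValue.Type.Delete);
      // ... scan and assert on which cells survive ...
      // a test that injects an edge should always restore it:
      EnvironmentEdgeManagerTestHelper.reset();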

    VerifyingIndexCodec codec = new VerifyingIndexCodec();
    builder.setIndexCodecForTesting(codec);

    // setup the Puts we want to write
    final long ts = System.currentTimeMillis();
    EnvironmentEdge edge = new EnvironmentEdge() {

      @Override
      public long currentTimeMillis() {
        return ts;
      }
View Full Code Here
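Truncated again; the frozen ts is typically used so that every Put written through the builder carries a known timestamp the VerifyingIndexCodec can check against. A hedged sketch using the pre-0.96 client API (row and column names are ours):

    EnvironmentEdgeManager.injectEdge(edge);
    Put p = new Put(Bytes.toBytes("row"), ts); // every cell is stamped with ts
    p.add(Bytes.toBytes("fam"), Bytes.toBytes("qual"), Bytes.toBytes("value"));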



  @Test(timeout = 60 * 1000)
  public void testHFileCleaning() throws Exception {
    final EnvironmentEdge originalEdge = EnvironmentEdgeManager.getDelegate();
    String prefix = "someHFileThatWouldBeAUUID";
    Configuration conf = UTIL.getConfiguration();
    // set TTL
    long ttl = 2000;
    conf.set(HFileCleaner.MASTER_HFILE_CLEANER_PLUGINS,
      "org.apache.hadoop.hbase.master.cleaner.TimeToLiveHFileCleaner");
    conf.setLong(TimeToLiveHFileCleaner.TTL_CONF_KEY, ttl);
    Server server = new DummyServer();
    Path archivedHfileDir = new Path(UTIL.getDataTestDirOnTestFS(), HConstants.HFILE_ARCHIVE_DIRECTORY);
    FileSystem fs = FileSystem.get(conf);
    HFileCleaner cleaner = new HFileCleaner(1000, server, conf, fs, archivedHfileDir);

    // Create 1 invalid file, 31 old files, and 1 "recent" file
    final long createTime = System.currentTimeMillis();
    fs.delete(archivedHfileDir, true);
    fs.mkdirs(archivedHfileDir);
    // Case 1: 1 invalid file, which should be deleted directly
    fs.createNewFile(new Path(archivedHfileDir, "dfd-dfd"));
    LOG.debug("Now is: " + createTime);
    for (int i = 1; i < 32; i++) {
      // Case 3: old files, which should be deletable by the first cleaner
      // in the chain (TimeToLiveHFileCleaner)
      Path fileName = new Path(archivedHfileDir, (prefix + "." + (createTime + i)));
      fs.createNewFile(fileName);
      // set the modification time past the TTL to ensure the file gets removed
      fs.setTimes(fileName, createTime - ttl - 1, -1);
      LOG.debug("Creating " + getFileStats(fileName, fs));
    }

    // Case 2: 1 "recent" file, not deletable by the first cleaner
    // (TimeToLiveHFileCleaner), so we do not go down the chain
    Path saved = new Path(archivedHfileDir, prefix + ".00000000000");
    fs.createNewFile(saved);
    // set the modification time within the TTL so the file survives
    fs.setTimes(saved, createTime - ttl / 2, -1);
    LOG.debug("Creating " + getFileStats(saved, fs));
    for (FileStatus stat : fs.listStatus(archivedHfileDir)) {
      LOG.debug(stat.getPath().toString());
    }

    assertEquals(33, fs.listStatus(archivedHfileDir).length);

    // set a custom edge manager to handle time checking
    EnvironmentEdge setTime = new EnvironmentEdge() {
      @Override
      public long currentTimeMillis() {
        return createTime;
      }
    };
View Full Code Here
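This variant differs from the first testHFileCleaning example in two ways: the archive directory lives on the test filesystem (getDataTestDirOnTestFS), and the saved file's timestamp sits half a TTL in the past instead of in the future, which still keeps it alive. Either way, the decision TimeToLiveHFileCleaner makes reduces to an age check against the injected clock; a hedged sketch of that logic (variable names are ours):

    long now = EnvironmentEdgeManager.currentTimeMillis(); // reads the injected edge
    long life = now - status.getModificationTime();        // file age
    boolean deletable = life > ttl;                        // expired => removable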



