Package org.apache.hadoop.hbase.io.hfile

Examples of org.apache.hadoop.hbase.io.hfile.CacheConfig
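CacheConfig bundles the per-store block cache settings (cache data on read, cache on write, evict on close, and so on) together with a handle on the shared BlockCache. Before the extracted examples, here is a minimal sketch of the construction pattern they all rely on; the standalone Configuration and the printed values are illustrative assumptions, not taken from any of the snippets below.

    // Minimal sketch: build a CacheConfig from a plain HBase Configuration
    // and inspect the shared block cache it exposes.
    Configuration conf = HBaseConfiguration.create();
    CacheConfig cacheConf = new CacheConfig(conf);
    BlockCache blockCache = cacheConf.getBlockCache();
    System.out.println("cache data on read: " + cacheConf.shouldCacheDataOnRead());
    System.out.println("blocks currently cached: " + blockCache.getBlockCount());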


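The test fragment below digs out the Store backing a table's first region, switches on cache-on-write and evict-on-close through that store's CacheConfig, and records baseline BlockCache statistics for later comparison.
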
    // get the block cache and region
    String regionName = table.getRegionLocations().firstKey().getEncodedName();
    HRegion region = TEST_UTIL.getRSForFirstRegionInTable(
        tableName).getFromOnlineRegions(regionName);
    Store store = region.getStores().values().iterator().next();
    CacheConfig cacheConf = store.getCacheConfig();
    cacheConf.setCacheDataOnWrite(true);
    cacheConf.setEvictOnClose(true);
    BlockCache cache = cacheConf.getBlockCache();

    // establish baseline stats
    long startBlockCount = cache.getBlockCount();
    long startBlockHits = cache.getStats().getHitCount();
    long startBlockMiss = cache.getStats().getMissCount();
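
A test built on this baseline would typically re-read the same counters after flushing or scanning and assert on the deltas. A sketch of that follow-up is below; the assertion is an illustrative assumption, not part of the original test.

    // Hypothetical continuation: compare current stats against the baseline above.
    long blocksAdded = cache.getBlockCount() - startBlockCount;
    long hitsAdded   = cache.getStats().getHitCount() - startBlockHits;
    long missesAdded = cache.getStats().getMissCount() - startBlockMiss;
    assertTrue("expected data blocks to be cached on write", blocksAdded > 0);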


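During HRegionServer startup, the server logs in its Kerberos principal (when secure Hadoop is in use) and then builds one shared CacheConfig from the server-wide Configuration:
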
    // login the server principal (if using secure Hadoop)
    UserProvider provider = UserProvider.instantiate(conf);
    provider.login("hbase.regionserver.keytab.file",
      "hbase.regionserver.kerberos.principal", this.isa.getHostName());
    regionServerAccounting = new RegionServerAccounting();
    cacheConfig = new CacheConfig(conf);
    uncaughtExceptionHandler = new UncaughtExceptionHandler() {
      public void uncaughtException(Thread t, Throwable e) {
        abort("Uncaught exception in service thread " + t.getName(), e);
      }
    };

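That same Configuration is enough to reach the block cache again later; here a fresh CacheConfig is created just to produce per-column-family block cache summaries:
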
    new HRegionServerCommandLine(regionServerClass).doMain(args);
  }

  @Override
  public List<BlockCacheColumnFamilySummary> getBlockCacheColumnFamilySummaries() throws IOException {
    BlockCache c = new CacheConfig(this.conf).getBlockCache();
    return c.getBlockCacheColumnFamilySummaries(this.conf);
  }

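Another variant of the same region server initialization, where the UserProvider is an existing field rather than instantiated inline:
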
    // login the server principal (if using secure Hadoop)
    userProvider.login("hbase.regionserver.keytab.file",
      "hbase.regionserver.kerberos.principal", this.isa.getHostName());
    regionServerAccounting = new RegionServerAccounting();
    cacheConfig = new CacheConfig(conf);
    uncaughtExceptionHandler = new UncaughtExceptionHandler() {
      @Override
      public void uncaughtException(Thread t, Throwable e) {
        abort("Uncaught exception in service thread " + t.getName(), e);
      }
    };

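On the read path, opening an HFile.Reader requires a CacheConfig. This test walks a job output directory, skips files that are not data files (such as _SUCCESS), and checks the on-disk data block encoding of each HFile produced:
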
      if (!f.getName().startsWith("part-")) { // filter out "_SUCCESS"
        continue;
      }
      HFile.Reader reader = null;
      try {
        reader = HFile.createReader(fs, f, new CacheConfig(conf));
        assertEquals(DataBlockEncoding.PREFIX, reader.getEncodingOnDisk());
      } finally {
        if (reader != null) { // createReader may have thrown before reader was assigned
          reader.close();
        }
      }
      hfilesCount++;

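StoreFile.Reader takes a CacheConfig as well; here one StoreFileScanner is opened per store file and seeked to a common key so the scanners can later be merged:
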
        continue;
      }
      StoreFile.Reader reader = new StoreFile.Reader(
          fs,
          f,
          new CacheConfig(fs.getConf()),
          DataBlockEncoding.NONE);
      StoreFileScanner scanner = reader.getStoreFileScanner(false, false);
      scanner.seek(fakeKV); // have to call seek of each underlying scanner, otherwise KeyValueHeap won't work
      scanners.add(scanner);
    }
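
The inline comment refers to merging through a KeyValueHeap; a sketch of what that next step could look like is below. The heap construction and the read loop are assumptions based on the KeyValueHeap API, not part of the original fragment.

    // Hypothetical continuation: merge the seeked scanners into one sorted stream.
    KeyValueHeap heap = new KeyValueHeap(scanners, KeyValue.COMPARATOR);
    KeyValue kv;
    while ((kv = heap.next()) != null) {
      // process kv in sorted order
    }
    heap.close();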

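Writing goes through the same object: HFile.getWriterFactory is handed a CacheConfig before the writer is configured with an output path and a comparator:
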
    HFile.Writer w = null;
    try {
      List<KeyValue> sortedKVs = Lists.newArrayList(kvs);
      Collections.sort(sortedKVs, KeyValue.COMPARATOR);
      FileSystem fs = FileSystem.get(conf);
      w = HFile.getWriterFactory(conf, new CacheConfig(conf))
          .withPath(fs, inputPath)
          .withComparator(KeyValue.COMPARATOR)
          .create();
      for (KeyValue kv : sortedKVs) {
        w.append(kv);
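
(The fragment is truncated here; an HFile.Writer opened this way still has to be closed, typically in a finally block, before the file trailer is written and the file becomes readable.)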

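Bulk loading opens each candidate HFile through a CacheConfig too, reading its file info and first and last row keys so the file can be matched against the region start/end keys:
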
      final Pair<byte[][], byte[][]> startEndKeys)
      throws IOException {
    final Path hfilePath = item.hfilePath;
    final FileSystem fs = hfilePath.getFileSystem(getConf());
    HFile.Reader hfr = HFile.createReader(fs, hfilePath,
        new CacheConfig(getConf()), getConf());
    final byte[] first, last;
    try {
      hfr.loadFileInfo();
      first = hfr.getFirstRowKey();
      last = hfr.getLastRowKey();
