Examples of CacheConfig


Examples of org.apache.hadoop.hbase.io.hfile.CacheConfig
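The HBase snippets below all follow the same basic pattern: build a CacheConfig from the cluster Configuration, then hand it to HFile readers and writers (or to a StoreFile.WriterBuilder) so that block-cache behaviour stays consistent across the different code paths.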

  private SortedSet<String> keysRead = new ConcurrentSkipListSet<String>();
  private List<StoreFile> inputStoreFiles;

  public HFileReadWriteTest() {
    conf = HBaseConfiguration.create();
    cacheConf = new CacheConfig(conf);
  }
View Full Code Here

Examples of org.apache.hadoop.hbase.io.hfile.CacheConfig

      final Pair<byte[][], byte[][]> startEndKeys)
      throws IOException {
    final Path hfilePath = item.hfilePath;
    final FileSystem fs = hfilePath.getFileSystem(getConf());
    HFile.Reader hfr = HFile.createReader(fs, hfilePath,
        new CacheConfig(getConf()));
    final byte[] first, last;
    try {
      hfr.loadFileInfo();
      first = hfr.getFirstRowKey();
      last = hfr.getLastRowKey();
View Full Code Here
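The snippet above stops before the reader is released. A minimal, self-contained sketch of the full open/inspect/close cycle, assuming a hypothetical file path and a standalone configuration, might look like this:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.util.Bytes;

public class HFileKeyRange {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    FileSystem fs = FileSystem.get(conf);
    Path hfilePath = new Path("/tmp/example.hfile");   // hypothetical path
    CacheConfig cacheConf = new CacheConfig(conf);

    // Open the file through the shared cache configuration.
    HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConf);
    try {
      reader.loadFileInfo();                 // must be called before key queries
      byte[] first = reader.getFirstRowKey();
      byte[] last = reader.getLastRowKey();
      System.out.println("row key range: " + Bytes.toStringBinary(first)
          + " .. " + Bytes.toStringBinary(last));
    } finally {
      reader.close();                        // release the reader's resources
    }
  }
}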

Examples of org.apache.hadoop.hbase.io.hfile.CacheConfig

  private static void copyHFileHalf(
      Configuration conf, Path inFile, Path outFile, Reference reference,
      HColumnDescriptor familyDescriptor)
  throws IOException {
    FileSystem fs = inFile.getFileSystem(conf);
    CacheConfig cacheConf = new CacheConfig(conf);
    HalfStoreFileReader halfReader = null;
    StoreFile.Writer halfWriter = null;
    HFileDataBlockEncoder dataBlockEncoder = new HFileDataBlockEncoderImpl(
        familyDescriptor.getDataBlockEncodingOnDisk(),
        familyDescriptor.getDataBlockEncoding());
    try {
      halfReader = new HalfStoreFileReader(fs, inFile, cacheConf,
          reference, DataBlockEncoding.NONE);
      Map<byte[], byte[]> fileInfo = halfReader.loadFileInfo();

      int blocksize = familyDescriptor.getBlocksize();
      Algorithm compression = familyDescriptor.getCompression();
      BloomType bloomFilterType = familyDescriptor.getBloomFilterType();

      halfWriter = new StoreFile.WriterBuilder(conf, cacheConf,
          fs, blocksize)
              .withFilePath(outFile)
              .withCompression(compression)
              .withDataBlockEncoder(dataBlockEncoder)
              .withBloomType(bloomFilterType)
              .withChecksumType(HStore.getChecksumType(conf))
              .withBytesPerChecksum(HStore.getBytesPerChecksum(conf))
              .build();
      HFileScanner scanner = halfReader.getScanner(false, false, false);
      scanner.seekTo();
      do {
        KeyValue kv = scanner.getKeyValue();
        halfWriter.append(kv);
      } while (scanner.next());

      for (Map.Entry<byte[],byte[]> entry : fileInfo.entrySet()) {
        if (shouldCopyHFileMetaKey(entry.getKey())) {
          halfWriter.appendFileInfo(entry.getKey(), entry.getValue());
        }
      }
    } finally {
      if (halfWriter != null) halfWriter.close();
      if (halfReader != null) halfReader.close(cacheConf.shouldEvictOnClose());
    }
  }
View Full Code Here
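Note how the same CacheConfig instance is threaded through both the HalfStoreFileReader and the StoreFile.WriterBuilder, and how the reader is closed with cacheConf.shouldEvictOnClose() so that blocks cached while copying the half-file are evicted (or retained) according to the configured policy.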

Examples of org.apache.hadoop.hbase.io.hfile.CacheConfig

      Path[] hfiles = FileUtil.stat2Paths(fs.listStatus(familyDir));
      for (Path hfile : hfiles) {
        if (hfile.getName().startsWith("_")) continue;
        HFile.Reader reader = HFile.createReader(fs, hfile,
            new CacheConfig(getConf()));
        final byte[] first, last;
        try {
          if (hcd.getCompressionType() != reader.getCompressionAlgorithm()) {
            hcd.setCompressionType(reader.getCompressionAlgorithm());
            LOG.info("Setting compression " + hcd.getCompressionType().name() +
View Full Code Here

Examples of org.apache.hadoop.hbase.io.hfile.CacheConfig

    String root_dir = TEST_UTIL.getDataTestDir().toString();
    Path p = new Path(root_dir, "test");

    Configuration conf = TEST_UTIL.getConfiguration();
    FileSystem fs = FileSystem.get(conf);
    CacheConfig cacheConf = new CacheConfig(conf);

    HFile.Writer w = HFile.getWriterFactory(conf, cacheConf)
        .withPath(fs, p)
        .withBlockSize(1024)
        .create();
View Full Code Here
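The excerpt ends as soon as the writer is created. Continuing with the w, fs, p and cacheConf variables above, a minimal sketch of the usual next steps (the row, family, qualifier and value bytes are purely illustrative) would be:

// Append one cell and finalize the HFile (names are illustrative only).
KeyValue kv = new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("f"),
    Bytes.toBytes("q"), Bytes.toBytes("value1"));
w.append(kv);
w.close();

// The finished file can be reopened through the same cache configuration.
HFile.Reader r = HFile.createReader(fs, p, cacheConf);
r.loadFileInfo();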

Examples of org.apache.hadoop.hbase.io.hfile.CacheConfig

  public void testHalfScanner() throws IOException {
      String root_dir = TEST_UTIL.getDataTestDir().toString();
      Path p = new Path(root_dir, "test");
      Configuration conf = TEST_UTIL.getConfiguration();
      FileSystem fs = FileSystem.get(conf);
      CacheConfig cacheConf = new CacheConfig(conf);

      HFile.Writer w = HFile.getWriterFactory(conf, cacheConf)
              .withPath(fs, p)
              .withBlockSize(1024)
              .create();
View Full Code Here

Examples of org.apache.hadoop.hbase.io.hfile.CacheConfig

      FileStatus[] hfiles = fs.listStatus(cf.getPath());
      for (FileStatus hfile : hfiles) {
        byte[] start, end;
        HFile.Reader hf = null;
        try {
          CacheConfig cacheConf = new CacheConfig(getConf());
          hf = HFile.createReader(fs, hfile.getPath(), cacheConf);
          hf.loadFileInfo();
          KeyValue startKv = KeyValue.createKeyValueFromKey(hf.getFirstKey());
          start = startKv.getRow();
          KeyValue endKv = KeyValue.createKeyValueFromKey(hf.getLastKey());
View Full Code Here

Examples of org.apache.hadoop.hbase.io.hfile.CacheConfig

  @Test
  public void testEncodedSeeker() throws IOException {
    System.err.println("Testing encoded seekers for encoding " + encoding);
    LruBlockCache cache =
      (LruBlockCache)new CacheConfig(testUtil.getConfiguration()).getBlockCache();
    cache.clearCache();
    // Need to disable default row bloom filter for this test to pass.
    HColumnDescriptor hcd = (new HColumnDescriptor(CF_NAME)).setMaxVersions(MAX_VERSIONS).
        setDataBlockEncoding(encoding).
        setEncodeOnDisk(encodeOnDisk).
View Full Code Here
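Here the CacheConfig is used only to reach the global LruBlockCache so the test can clear it before exercising the encoded seekers; new CacheConfig(conf).getBlockCache() followed by clearCache() is the common way tests reset cache state between runs.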

Examples of org.apache.http.impl.client.cache.CacheConfig
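The remaining examples use an unrelated class with the same simple name from HttpClient's caching module: this CacheConfig holds tuning parameters for the HTTP response cache (such as the update-retry limit set below), not anything HBase-specific.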

    @Override
    @Before
    public void setUp() throws Exception {
        mockMemcachedClient = EasyMock.createMock(MemcachedClientIF.class);
        mockSerializer = EasyMock.createMock(HttpCacheEntrySerializer.class);
        CacheConfig config = new CacheConfig();
        config.setMaxUpdateRetries(1);
        impl = new MemcachedHttpCacheStorage(mockMemcachedClient, config,
                mockSerializer);
    }
View Full Code Here

Examples of org.apache.http.impl.client.cache.CacheConfig

    /**
     * Create a storage backend using the pre-configured given
     * <i>memcached</i> client.
     * @param cache client to use for communicating with <i>memcached</i>
     */
    public MemcachedHttpCacheStorage(MemcachedClientIF cache) {
        this(cache, new CacheConfig(), new DefaultHttpCacheEntrySerializer());
    }
View Full Code Here
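Putting the two previous snippets together, a plausible sketch of wiring a memcached-backed storage into a caching client (HttpClient 4.1/4.2-era API; the host, port and retry value are illustrative assumptions) could look like this:

import java.net.InetSocketAddress;

import net.spy.memcached.MemcachedClient;

import org.apache.http.client.HttpClient;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.client.cache.CacheConfig;
import org.apache.http.impl.client.cache.CachingHttpClient;
import org.apache.http.impl.client.cache.memcached.MemcachedHttpCacheStorage;

public class MemcachedCacheExample {
  public static void main(String[] args) throws Exception {
    // Client for a local memcached instance (address is illustrative).
    MemcachedClient memcached =
        new MemcachedClient(new InetSocketAddress("localhost", 11211));

    // Cache tuning; only setMaxUpdateRetries is shown in the snippets above.
    CacheConfig cacheConfig = new CacheConfig();
    cacheConfig.setMaxUpdateRetries(1);

    // Storage backend using the pre-configured memcached client (default serializer).
    MemcachedHttpCacheStorage storage = new MemcachedHttpCacheStorage(memcached);

    // Caching decorator around a plain HttpClient.
    HttpClient client = new CachingHttpClient(new DefaultHttpClient(), storage, cacheConfig);

    // ... issue requests through 'client' as usual ...
    memcached.shutdown();
  }
}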