Package org.apache.hadoop.hbase.io.hfile

Examples of org.apache.hadoop.hbase.io.hfile.HFileContext
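
HFileContext carries the low-level settings an HFile is written (and later read) with: block size, compression algorithm, checksum type and bytes per checksum, data block encoding, and tag handling. Instances are normally assembled through HFileContextBuilder, as the excerpts below show. As a minimal, self-contained sketch (the path and values are illustrative, not taken from any excerpt; the usual HBase client imports are assumed):

      Configuration conf = HBaseConfiguration.create();
      FileSystem fs = FileSystem.get(conf);
      Path path = new Path("/tmp/example.hfile");     // illustrative location
      HFileContext context = new HFileContextBuilder()
          .withBlockSize(64 * 1024)                   // 64 KB data blocks
          .build();
      HFile.Writer writer = HFile.getWriterFactory(conf, new CacheConfig(conf))
          .withPath(fs, path)
          .withFileContext(context)
          .create();
      try {
        // Cells must be appended in key order.
        writer.append(new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("f"),
            Bytes.toBytes("q"), Bytes.toBytes("v")));
      } finally {
        writer.close();
      }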


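Loading the file info of an existing reader, then deriving a context from the column family descriptor's compression, checksum, block size, and data block encoding settings: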
      Map<byte[], byte[]> fileInfo = halfReader.loadFileInfo();

      int blocksize = familyDescriptor.getBlocksize();
      Algorithm compression = familyDescriptor.getCompression();
      BloomType bloomFilterType = familyDescriptor.getBloomFilterType();
      HFileContext hFileContext = new HFileContextBuilder()
                                  .withCompression(compression)
                                  .withChecksumType(HStore.getChecksumType(conf))
                                  .withBytesPerCheckSum(HStore.getBytesPerChecksum(conf))
                                  .withBlockSize(blocksize)
                                  .withDataBlockEncoding(familyDescriptor.getDataBlockEncoding())
                                  .build();


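Creating a StoreFile.Writer in the directory of an existing store file; the context here pins only a small block size: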
    StoreFile f = this.store.getStorefiles().iterator().next();
    Path storedir = f.getPath().getParent();
    long seqid = f.getMaxSequenceId();
    Configuration c = HBaseConfiguration.create();
    FileSystem fs = FileSystem.get(c);
    HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL).build();
    StoreFile.Writer w = new StoreFile.WriterBuilder(c, new CacheConfig(c), fs)
            .withOutputDir(storedir)
            .withFileContext(meta)
            .build();

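The same pattern at the HFile level: a writer factory plus a context with a 1 KB block size: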
    String root_dir = TEST_UTIL.getDataTestDir().toString();
    Path p = new Path(root_dir, "test");

    Configuration conf = TEST_UTIL.getConfiguration();
    FileSystem fs = FileSystem.get(conf);
    CacheConfig cacheConf = new CacheConfig(conf);
    HFileContext meta = new HFileContextBuilder().withBlockSize(1024).build();
    HFile.Writer w = HFile.getWriterFactory(conf, cacheConf)
        .withPath(fs, p)
        .withFileContext(meta)
        .create();

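A test that enables tags, tag compression, and PREFIX data block encoding on the context before writing a store file: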
  @Test
  public void testReseek() throws Exception {
    // write the file
    Path f = new Path(ROOT_DIR, "testReseek");
    HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).withIncludesTags(true)
        .withCompressTags(true).withDataBlockEncoding(DataBlockEncoding.PREFIX).build();
    // Make a store file and write data to it.
    StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, fs).withFilePath(f)
        .withFileContext(meta).build();


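A bare new HFileContext() (all defaults) is also valid; here it backs a writer that produces an HFile for bulk loading: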
    final Path hfilePath = new Path(hFilePath);
    fs.mkdirs(hfilePath);
    Path path = new Path(pathStr);
    HFile.WriterFactory wf = HFile.getWriterFactoryNoCache(TEST_UTIL.getConfiguration());
    Assert.assertNotNull(wf);
    HFileContext context = new HFileContext();
    HFile.Writer writer = wf.withPath(fs, path).withFileContext(context).create();
    // "l" is a long timestamp declared earlier in the full test (not shown here).
    KeyValue kv = new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("col"), Bytes.toBytes("q"), l,
        Bytes.toBytes("version2"));

    // Set cell seq id to test bulk load native hfiles.

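Writing a store file over an HFileSystem that wraps a fault-injecting filesystem, with a 2 KB block size: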
        "regionname"), "familyname");
    HFileSystem hfs = (HFileSystem)util.getTestFileSystem();
    FaultyFileSystem faultyfs = new FaultyFileSystem(hfs.getBackingFs());
    // The faulty wrapper is kept for use later in the test; note the writer
    // below is built over the healthy hfs.
    FileSystem fs = new HFileSystem(faultyfs);
    CacheConfig cacheConf = new CacheConfig(util.getConfiguration());
    HFileContext meta = new HFileContextBuilder().withBlockSize(2 * 1024).build();
    StoreFile.Writer writer = new StoreFile.WriterBuilder(
        util.getConfiguration(), cacheConf, hfs)
            .withOutputDir(hfilePath)
            .withFileContext(meta)
            .build();

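A helper that writes an HFile spanning a row-key range; the context keeps all defaults: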
    // Signature reconstructed from the body (the excerpt begins mid-line);
    // the method name and any leading parameters are assumptions.
    static void createHFile(Configuration conf, FileSystem fs, Path path,
        byte[] startKey, byte[] endKey, int numRows) throws IOException {

      HFile.Writer writer = null;
      long now = System.currentTimeMillis();
      try {
        HFileContext context = new HFileContextBuilder().build();
        writer = HFile.getWriterFactory(conf, new CacheConfig(conf))
            .withPath(fs, path)
            .withFileContext(context)
            .create();
        // subtract 2 since numRows doesn't include boundary keys

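Setting the Bloom filter block size and cache-on-write in the configuration, then the data block size on the context: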
  // Signature reconstructed (the excerpt begins mid-declaration); the name
  // and parameter list are assumptions based on the body's use of t and bt.
  private void writeStoreFile(int t, BloomType bt)
      throws IOException {
    conf.setInt(BloomFilterFactory.IO_STOREFILE_BLOOM_BLOCK_SIZE,
        BLOOM_BLOCK_SIZES[t]);
    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, true);
    cacheConf = new CacheConfig(conf);
    HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCK_SIZES[t]).build();
    StoreFile.Writer w = new StoreFile.WriterBuilder(conf, cacheConf, fs)
            .withOutputDir(TEST_UTIL.getDataTestDir())
            .withBloomType(bt)
            .withFileContext(meta)
            .build();
