Examples of FSTableDescriptors
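
The fragments on this page are excerpts from the HBase source and its tests, so most of them start mid-method. As an orientation, here is a minimal, self-contained sketch of the pattern they share: construct an FSTableDescriptors against the filesystem holding hbase.rootdir, then read (get) or register (add) an HTableDescriptor. Constructor and method overloads changed between HBase versions (the 0.94-era API sketched here takes a FileSystem, a Path, and String table names; later versions take a Configuration and TableName), so treat the class name FSTableDescriptorsSketch and the table names as placeholders and the whole block as an outline rather than a definitive recipe.

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HConstants;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.util.FSTableDescriptors;

    public class FSTableDescriptorsSketch {
      public static void main(String[] args) throws IOException {
        Configuration c = HBaseConfiguration.create();
        FileSystem fs = FileSystem.get(c);
        // hbase.rootdir (HConstants.HBASE_DIR) is the directory under which
        // the per-table .tableinfo files live.
        FSTableDescriptors fstd =
            new FSTableDescriptors(fs, new Path(c.get(HConstants.HBASE_DIR)));

        // Read the descriptor of an existing table ("demo" is a placeholder name).
        HTableDescriptor existing = fstd.get("demo");
        System.out.println("demo descriptor: " + existing);

        // Register a (placeholder) descriptor with the instance.
        HTableDescriptor newDesc = new HTableDescriptor("demo_copy");
        fstd.add(newDesc);
      }
    }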


Examples of org.apache.hadoop.hbase.util.FSTableDescriptors

    try {
      fs = FileSystem.get(c);
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
    // Build the descriptor lookup against hbase.rootdir, then resolve this table's descriptor.
    FSTableDescriptors fstd =
      new FSTableDescriptors(fs, new Path(c.get(HConstants.HBASE_DIR)));
    try {
      return fstd.get(this.tableName);
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

Examples of org.apache.hadoop.hbase.util.FSTableDescriptors

    try {
      fs = FileSystem.get(c);
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
    FSTableDescriptors fstd =
      new FSTableDescriptors(fs, new Path(c.get(HConstants.HBASE_DIR)));
    try {
      fstd.add(newDesc);
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

Examples of org.apache.hadoop.hbase.util.FSTableDescriptors

      throws IOException {
    final String name = "testAlreadyExists";
    FileSystem fs = FileSystem.get(UTIL.getConfiguration());
    // Clean up old tests if any detritus is lying around.
    Path rootdir = new Path(UTIL.getDataTestDir(), name);
    TableDescriptors htds = new FSTableDescriptors(fs, rootdir);
    HTableDescriptor htd = new HTableDescriptor(name);
    htds.add(htd);
    assertFalse("Should not create new table descriptor",
      FSTableDescriptors.createTableDescriptor(fs, rootdir, htd, false));
  }
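
The test above exercises the helper's return value: when a .tableinfo already exists and the final forceCreation argument is false, the static FSTableDescriptors.createTableDescriptor(...) writes nothing and returns false. A compact, self-contained sketch of that behaviour, assuming the same 0.94-era static signature as the test and using a placeholder root directory and table name:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.util.FSTableDescriptors;

    public class TableInfoAlreadyExistsSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        FileSystem fs = FileSystem.get(conf);
        Path rootdir = new Path("/tmp/hbase-sketch");   // placeholder root directory
        HTableDescriptor htd = new HTableDescriptor("demo");

        // First call writes the .tableinfo and reports success.
        boolean first = FSTableDescriptors.createTableDescriptor(fs, rootdir, htd, false);
        // Second call is refused: the descriptor already exists and forceCreation is false.
        boolean second = FSTableDescriptors.createTableDescriptor(fs, rootdir, htd, false);
        System.out.println("first=" + first + ", second=" + second);   // expect true, false
      }
    }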

Examples of org.apache.hadoop.hbase.util.FSTableDescriptors

      // Point fs.defaultFS at hbase.rootdir so that FileSystem.get(this.conf)
      // below resolves the filesystem hosting the HBase root directory (rather
      // than whatever the Hadoop default filesystem happens to be).
      this.conf.set("fs.defaultFS", this.conf.get("hbase.rootdir"));
      // Get fs instance used by this RS
      this.fs = FileSystem.get(this.conf);
      this.rootDir = new Path(this.conf.get(HConstants.HBASE_DIR));
      this.tableDescriptors = new FSTableDescriptors(this.fs, this.rootDir, true);
      this.hlog = setupWALAndReplication();
      // Init in here rather than in constructor after thread name has been set
      this.metrics = new RegionServerMetrics();
      startServiceThreads();
      LOG.info("Serving as " + this.serverNameFromMasterPOV +

Examples of org.apache.hadoop.hbase.util.FSTableDescriptors

    Path tempdir = fileSystemManager.getTempDir();
    FileSystem fs = fileSystemManager.getFileSystem();

    // 1. Create Table Descriptor
    Path tempTableDir = FSUtils.getTableDir(tempdir, tableName);
    new FSTableDescriptors(this.conf).createTableDescriptorForTableDirectory(
      tempTableDir, this.hTableDescriptor, false);
    Path tableDir = FSUtils.getTableDir(fileSystemManager.getRootDir(), tableName);

    // 2. Create Regions
    List<HRegionInfo> regionInfos = handleCreateHdfsRegions(tempdir, tableName);
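
The snippet above writes the new table's descriptor into a temporary table directory before the regions are created and the directory is moved under hbase.rootdir. A condensed sketch of just that descriptor-writing step, assuming the 0.96-era API shown here (a Configuration-based FSTableDescriptors, FSUtils.getTableDir, TableName) and placeholder paths and names:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.util.FSTableDescriptors;
    import org.apache.hadoop.hbase.util.FSUtils;

    public class CreateTableDescriptorSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        TableName tableName = TableName.valueOf("demo");        // placeholder table
        HTableDescriptor htd = new HTableDescriptor(tableName);
        htd.addFamily(new HColumnDescriptor("f"));

        // Stand-in for fileSystemManager.getTempDir(); the master uses a temp
        // directory under hbase.rootdir for in-progress table creation.
        Path tempdir = new Path("/tmp/hbase-sketch/.tmp");
        Path tempTableDir = FSUtils.getTableDir(tempdir, tableName);

        // Write the .tableinfo into the temporary table directory; the final
        // 'false' mirrors the snippet above (don't force-overwrite an existing descriptor).
        new FSTableDescriptors(conf).createTableDescriptorForTableDirectory(
            tempTableDir, htd, false);
      }
    }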

Examples of org.apache.hadoop.hbase.util.FSTableDescriptors

      org.apache.hadoop.hbase.util.FSTableDescriptorMigrationToSubdir
        .migrateFSTableDescriptorsIfNecessary(fs, rd);
    }
     
    // Create the .tableinfo for META if not already there.
    new FSTableDescriptors(fs, rd).createTableDescriptor(HTableDescriptor.META_TABLEDESC);

    return rd;
  }

Examples of org.apache.hadoop.hbase.util.FSTableDescriptors

    this.masterActiveTime = System.currentTimeMillis();
    // TODO: Do this using Dependency Injection, using PicoContainer, Guice or Spring.
    this.fileSystemManager = new MasterFileSystem(this, this, metricsMaster, masterRecovery);

    this.tableDescriptors =
      new FSTableDescriptors(this.fileSystemManager.getFileSystem(),
      this.fileSystemManager.getRootDir());

    // publish cluster ID
    status.setStatus("Publishing Cluster ID in ZooKeeper");
    ZKClusterId.setClusterId(this.zooKeeper, fileSystemManager.getClusterId());

Examples of org.apache.hadoop.hbase.util.FSTableDescriptors

    HTableDescriptor htdEnabled = new HTableDescriptor(TableName.valueOf(enabledTable));
    htdEnabled.addFamily(new HColumnDescriptor(FAMILY));

    FileSystem filesystem = FileSystem.get(conf);
    Path rootdir = FSUtils.getRootDir(conf);
    FSTableDescriptors fstd = new FSTableDescriptors(filesystem, rootdir);
    // Write the .tableinfo
    fstd.createTableDescriptor(htdEnabled);

    HRegionInfo hriEnabled = new HRegionInfo(htdEnabled.getTableName(), null, null);
    createRegion(hriEnabled, rootdir, conf, htdEnabled);

    List<HRegionInfo> enabledRegions = TEST_UTIL.createMultiRegionsInMeta(
        TEST_UTIL.getConfiguration(), htdEnabled, SPLIT_KEYS);

    TableName disabledTable = TableName.valueOf("disabledTable");
    HTableDescriptor htdDisabled = new HTableDescriptor(disabledTable);
    htdDisabled.addFamily(new HColumnDescriptor(FAMILY));
    // Write the .tableinfo
    fstd.createTableDescriptor(htdDisabled);
    HRegionInfo hriDisabled = new HRegionInfo(htdDisabled.getTableName(), null, null);
    createRegion(hriDisabled, rootdir, conf, htdDisabled);
    List<HRegionInfo> disabledRegions = TEST_UTIL.createMultiRegionsInMeta(
        TEST_UTIL.getConfiguration(), htdDisabled, SPLIT_KEYS);

Examples of org.apache.hadoop.hbase.util.FSTableDescriptors

    byte [] enabledTable = Bytes.toBytes("enabledTable");
    HTableDescriptor htdEnabled = new HTableDescriptor(TableName.valueOf(enabledTable));
    htdEnabled.addFamily(new HColumnDescriptor(FAMILY));
    FileSystem filesystem = FileSystem.get(conf);
    Path rootdir = FSUtils.getRootDir(conf);
    FSTableDescriptors fstd = new FSTableDescriptors(filesystem, rootdir);
    // Write the .tableinfo
    fstd.createTableDescriptor(htdEnabled);
    HRegionInfo hriEnabled = new HRegionInfo(htdEnabled.getTableName(),
        null, null);
    createRegion(hriEnabled, rootdir, conf, htdEnabled);

    List<HRegionInfo> enabledRegions = TEST_UTIL.createMultiRegionsInMeta(
        TEST_UTIL.getConfiguration(), htdEnabled, SPLIT_KEYS);

    TableName disabledTable =
        TableName.valueOf("disabledTable");
    HTableDescriptor htdDisabled = new HTableDescriptor(disabledTable);
    htdDisabled.addFamily(new HColumnDescriptor(FAMILY));
    // Write the .tableinfo
    fstd.createTableDescriptor(htdDisabled);
    HRegionInfo hriDisabled = new HRegionInfo(htdDisabled.getTableName(), null, null);
    createRegion(hriDisabled, rootdir, conf, htdDisabled);

    List<HRegionInfo> disabledRegions = TEST_UTIL.createMultiRegionsInMeta(
        TEST_UTIL.getConfiguration(), htdDisabled, SPLIT_KEYS);

Examples of org.apache.hadoop.hbase.util.FSTableDescriptors

  public void testRestore(final Path snapshotDir, final String sourceTableName,
      final HTableDescriptor htdClone) throws IOException {
    LOG.debug("pre-restore table=" + htdClone.getTableName() + " snapshot=" + snapshotDir);
    FSUtils.logFileSystemState(fs, rootDir, LOG);

    new FSTableDescriptors(conf).createTableDescriptor(htdClone);
    RestoreSnapshotHelper helper = getRestoreHelper(rootDir, snapshotDir, sourceTableName, htdClone);
    helper.restoreHdfsRegions();

    LOG.debug("post-restore table=" + htdClone.getTableName() + " snapshot=" + snapshotDir);
    FSUtils.logFileSystemState(fs, rootDir, LOG);