Package co.cask.cdap.data2.datafabric.dataset

Examples of co.cask.cdap.data2.datafabric.dataset.DatasetMetaTableUtil


  /**
   * The transaction coprocessors (0.94 and 0.96 versions of {@code DefaultTransactionProcessor}) need access
   * to CConfiguration values in order to load transaction snapshots for data cleanup.
   */
  private void checkTransactionRequirements() {
    try {
      new ConfigurationTable(hConf).write(ConfigurationTable.Type.DEFAULT, cConf);
    } catch (IOException ioe) {
      throw Throwables.propagate(ioe);
    }
  }
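A minimal, self-contained sketch of the same write path. The package names below follow the CDAP 2.x source layout and are assumptions; CConfiguration.create() and HBaseConfiguration.create() are the standard factories, and the write(Type, CConfiguration) call is exactly the one used in the excerpt above.

  import java.io.IOException;

  import co.cask.cdap.common.conf.CConfiguration;           // assumed package
  import co.cask.cdap.data2.util.hbase.ConfigurationTable;  // assumed package
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.HBaseConfiguration;

  public final class WriteCConfExample {
    public static void main(String[] args) throws IOException {
      CConfiguration cConf = CConfiguration.create();     // loads cdap-default.xml / cdap-site.xml
      Configuration hConf = HBaseConfiguration.create();  // loads hbase-site.xml
      // Persist the CDAP configuration into HBase so the transaction
      // coprocessors can load snapshots for data cleanup.
      new ConfigurationTable(hConf).write(ConfigurationTable.Type.DEFAULT, cConf);
    }
  }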


    this.hConf = hConf;
    this.configTableName = configTableName;
    // Table names have the form "<namespace>.<rest>"; keep only the namespace prefix.
    String queueConfigTableName = Bytes.toString(configTableName);
    String[] parts = queueConfigTableName.split("\\.", 2);
    this.tableNamespace = parts[0];
    this.configTable = new ConfigurationTable(hConf);
  }
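The constructor keeps only the text before the first dot as the table namespace. A tiny illustration of the split semantics, with a hypothetical table name:

    // Hypothetical table name, used only to illustrate the split above.
    String queueConfigTableName = "cdap.system.queue.config";
    String[] parts = queueConfigTableName.split("\\.", 2);  // limit 2: split at the first dot only
    // parts[0] == "cdap"                -> the table namespace
    // parts[1] == "system.queue.config" -> the remainder of the table name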

  }

  @Override
  public void setConf(Configuration conf) {
    super.setConf(conf);
    this.configTable = new ConfigurationTable(conf);
  }
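Hadoop invokes setConf(Configuration) automatically when it instantiates a Configurable through ReflectionUtils, which is why the override rebuilds the ConfigurationTable with the freshly injected configuration instead of relying on a constructor argument. A sketch, where MyConfigurableCoprocessor is a hypothetical Configurable class:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.util.ReflectionUtils;

    Configuration conf = new Configuration();
    // newInstance(...) calls setConf(conf) on Configurable objects after construction.
    MyConfigurableCoprocessor instance =
        ReflectionUtils.newInstance(MyConfigurableCoprocessor.class, conf);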

    final CConfiguration cConf = injector.getInstance(CConfiguration.class);
    DefaultDatasetNamespace namespace = new DefaultDatasetNamespace(cConf, Namespace.USER);

    Configuration hConf = injector.getInstance(Configuration.class);
    HBaseAdmin hAdmin = new HBaseAdmin(hConf);
    final HBaseTableUtil hBaseTableUtil = injector.getInstance(HBaseTableUtil.class);

    for (HTableDescriptor desc : hAdmin.listTables()) {
      String tableName = desc.getNameAsString();
      // TODO: this works for now, but it will need to change if HBase dataset namespacing becomes more than a prefix
      if (namespace.fromNamespaced(tableName) != null) {
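The excerpt is cut off at the namespace check: fromNamespaced(tableName) returns a non-null name only for tables carrying the CDAP user-namespace prefix, so the loop selects CDAP-owned tables. A hedged sketch of how such a loop body commonly proceeds; whether the original code drops or merely inspects the matched tables is not visible here, and disableTable/deleteTable are standard HBaseAdmin calls:

    for (HTableDescriptor desc : hAdmin.listTables()) {
      String tableName = desc.getNameAsString();
      if (namespace.fromNamespaced(tableName) != null) {
        // The table belongs to the CDAP user namespace; for example, drop it:
        hAdmin.disableTable(tableName);
        hAdmin.deleteTable(tableName);
      }
    }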

    // (for example, if Hadoop uses a different Java version than CDAP).

    Set<String> bootstrapClassPaths = ExploreServiceUtils.getBoostrapClasses();

    Set<File> hBaseTableDeps = ExploreServiceUtils.traceDependencies(
      new HBaseTableUtilFactory().get().getClass().getCanonicalName(),
      bootstrapClassPaths, null);

    // Note the order of dependency jars is important so that HBase jars come first in the classpath order
    // LinkedHashSet maintains insertion order while removing duplicate entries.
    Set<File> orderedDependencies = new LinkedHashSet<File>();
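An illustration of the ordering guarantee relied on here: LinkedHashSet iterates in insertion order and silently drops duplicates, so adding the HBase jars first keeps them first on the classpath (otherDeps is a hypothetical second dependency set):

    Set<File> orderedDependencies = new LinkedHashSet<File>();
    orderedDependencies.addAll(hBaseTableDeps);  // HBase jars first in classpath order
    orderedDependencies.addAll(otherDeps);       // duplicates are skipped, order is preserved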

    if (classLoader == null) {
      usingCL = ExploreRuntimeModule.class.getClassLoader();
    }
    Set<String> bootstrapClassPaths = getBoostrapClasses();

    Set<File> hBaseTableDeps = traceDependencies(new HBaseTableUtilFactory().get().getClass().getCanonicalName(),
                                                 bootstrapClassPaths, usingCL);

    // Note the order of dependency jars is important so that HBase jars come first in the classpath order
    // LinkedHashSet maintains insertion order while removing duplicate entries.
    Set<File> orderedDependencies = new LinkedHashSet<File>();
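The null check is the usual "caller-supplied classloader, else my own" fallback; compressed into a single expression it reads:

    ClassLoader usingCL = (classLoader != null)
        ? classLoader
        : ExploreRuntimeModule.class.getClassLoader();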

      // We have to add this Hadoop Configuration to the dependency jar so that when the Spark job runs outside
      // CDAP it can create the BasicMapReduceContext to have access to our datasets, transactions etc.
      resources.add(hConfLocation);

      try {
        Class<?> hbaseTableUtilClass = new HBaseTableUtilFactory().get().getClass();
        classes.add(hbaseTableUtilClass);
      } catch (ProvisionException e) {
        LOG.warn("Not including HBaseTableUtil classes in submitted Job Jar since they are not available");
      }
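How hConfLocation is produced is not shown in the excerpt. A hedged sketch of one way to serialize a Hadoop Configuration for a job running outside CDAP; Configuration.writeXml is standard Hadoop API, while the helper itself and its temp-file handling are illustrative:

    // Sketch only: serialize a Configuration so the remote job can reload it
    // later via conf.addResource(...).
    static File writeHConf(Configuration hConf) throws IOException {
      File hConfFile = File.createTempFile("hConf", ".xml");
      try (OutputStream os = new FileOutputStream(hConfFile)) {
        hConf.writeXml(os);
      }
      return hConfFile;
    }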

      LOG.info("OutputFormat class not found: {}", t.getMessage(), t);
      // Ignore
    }

    try {
      Class<?> hbaseTableUtilClass = new HBaseTableUtilFactory().get().getClass();
      classes.add(hbaseTableUtilClass);
    } catch (ProvisionException e) {
      LOG.warn("Not including HBaseTableUtil classes in submitted Job Jar since they are not available");
    }
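ProvisionException (com.google.inject.ProvisionException) is Guice's wrapper for failures inside a provider; here it most plausibly signals that HBaseTableUtilFactory could not resolve a version-specific HBaseTableUtil, for instance when no supported HBase is on the classpath. The guard pattern in isolation:

    HBaseTableUtil tableUtil = null;
    try {
      // Picks the implementation matching the detected HBase version.
      tableUtil = new HBaseTableUtilFactory().get();
    } catch (ProvisionException e) {
      // HBase is absent or unsupported; proceed without the HBase-specific classes.
    }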

    }
  }


  private TwillPreparer prepare(TwillPreparer preparer) {
    return preparer.withDependencies(new HBaseTableUtilFactory().get().getClass())
      .addSecureStore(secureStoreUpdater.update(null, null)); // HBaseSecureStoreUpdater.update() ignores parameters
  }
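A hypothetical call site for the helper: routing every launch through prepare(...) guarantees the version-specific HBaseTableUtil jar and the HBase delegation tokens travel with each Twill application. twillRunner and MyTwillApplication are illustrative names; TwillRunner.prepare(...) and TwillPreparer.start() are standard Apache Twill API:

    TwillController controller =
        prepare(twillRunner.prepare(new MyTwillApplication())).start();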

        if (options.isDebug()) {
          LOG.info("Starting {} with debugging enabled.", program.getId());
          twillPreparer.enableDebugging();
        }
        TwillController twillController = twillPreparer
          .withDependencies(new HBaseTableUtilFactory().get().getClass())
          .addLogHandler(new PrinterLogHandler(new PrintWriter(System.out)))
          .addSecureStore(YarnSecureStore.create(HBaseTokenUtils.obtainToken(hConf, new Credentials())))
          .withApplicationArguments(
            String.format("--%s", RunnableOptions.JAR), copiedProgram.getJarLocation().getName(),
            String.format("--%s", RunnableOptions.RUNTIME_ARGS), runtimeArgs
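The chain is cut off mid-argument list; in the Twill API it terminates with start(), which submits the application to YARN and hands back the controller. A hedged continuation with the remaining arguments elided:

        TwillController twillController = twillPreparer
          // ... dependencies, log handler, secure store, application arguments as above ...
          .start();  // submit to YARN; use the controller to monitor or stop the run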
