Package org.apache.hadoop.hive.conf

Examples of org.apache.hadoop.hive.conf.HiveConf$PatternValidator
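The snippets below, collected from Hive and HCatalog sources, show common ways to construct a HiveConf and populate it: through the typed ConfVars setters, through plain set() calls on raw property names, and by round-tripping serialized properties through a job configuration.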


  /**
   * Create a new HiveConf and set properties necessary for unit tests.
   */
  protected void setUpHiveConf() {
    hiveConf = new HiveConf(this.getClass());
    hiveConf.setVar(HiveConf.ConfVars.PREEXECHOOKS, "");
    hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, "");
    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
    hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, TEST_WAREHOUSE_DIR);
  }
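A minimal sketch of how a test might build on this helper; the JUnit wiring, the TEST_WAREHOUSE_DIR value, and the Driver usage are assumptions for illustration, not part of the excerpt:

  import org.apache.hadoop.hive.conf.HiveConf;
  import org.apache.hadoop.hive.ql.Driver;
  import org.junit.Before;

  public class HiveConfSetupTest {
    // Assumed value; the excerpt only references the constant by name.
    private static final String TEST_WAREHOUSE_DIR = "/tmp/hive_test_warehouse";
    private HiveConf hiveConf;
    private Driver driver;

    @Before
    public void setUp() {
      setUpHiveConf();               // helper from the excerpt above
      driver = new Driver(hiveConf); // run HiveQL against the test configuration
    }

    protected void setUpHiveConf() {
      hiveConf = new HiveConf(this.getClass());
      hiveConf.setVar(HiveConf.ConfVars.PREEXECHOOKS, "");
      hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, "");
      hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
      hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, TEST_WAREHOUSE_DIR);
    }
  }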


        .getHadoopThriftAuthBridge());
    isServerRunning = true;
    securityManager = System.getSecurityManager();
    System.setSecurityManager(new NoExitSecurityManager());

    hcatConf = new HiveConf(TestHCatPartitionPublish.class);
    hcatConf.set("hive.metastore.local", "false");
    hcatConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:"
        + msPort);
    hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
    hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTFAILURERETRIES, 3);
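For context, a hedged sketch of opening a metastore client against a configuration like the one above; msPort is assumed to hold the port of an already-running metastore:

    // Assumes a Thrift metastore is already listening on msPort (see the excerpt above).
    HiveConf conf = new HiveConf(TestHCatPartitionPublish.class);
    conf.set("hive.metastore.local", "false");
    conf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + msPort);
    conf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
    HiveMetaStoreClient client = new HiveMetaStoreClient(conf); // opens the Thrift connection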

      LogUtils.initHiveLog4j();
    } catch (LogInitializationException e) {
      // Hive's log4j setup failed; fall back to default logging and continue.
    }

    CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
    ss.in = System.in;
    try {
      ss.out = new PrintStream(System.out, true, "UTF-8");
      ss.err = new PrintStream(System.err, true, "UTF-8");
    } catch (UnsupportedEncodingException e) {
      // Every JVM is required to support UTF-8, so this path is effectively unreachable.
      System.exit(1);
    }

    HiveConf conf = ss.getConf();

    HiveConf.setVar(conf, ConfVars.SEMANTIC_ANALYZER_HOOK, HCatSemanticAnalyzer.class.getName());

    SessionState.start(ss);

    Options options = new Options();

    // -e 'quoted-query-string'
    options.addOption(OptionBuilder
        .hasArg()
        .withArgName("exec")
        .withDescription("hcat command given from command line")
        .create('e'));

    // -f <query-file>
    options.addOption(OptionBuilder
        .hasArg()
        .withArgName("file")
        .withDescription("hcat commands in file")
        .create('f'));

    // -g
    options.addOption(OptionBuilder
        .hasArg()
        .withArgName("group")
        .withDescription("group for the db/table specified in CREATE statement")
        .create('g'));

    // -p
    options.addOption(OptionBuilder
        .hasArg()
        .withArgName("perms")
        .withDescription("permissions for the db/table specified in CREATE statement")
        .create('p'));

    // -D
    options.addOption(OptionBuilder
        .hasArgs(2)
        .withArgName("property=value")
        .withValueSeparator()
        .withDescription("use hadoop value for given property")
        .create('D'));

    // [-h|--help]
    options.addOption(new Option("h", "help", false, "Print help information"));

    Parser parser = new GnuParser();
    CommandLine cmdLine = null;

    try {
      cmdLine = parser.parse(options, args);

    } catch (ParseException e) {
      printUsage(options, ss.err);
      System.exit(1);
    }
    // -e
    String execString = cmdLine.getOptionValue('e');
    // -f
    String fileName = cmdLine.getOptionValue('f');
    // -h
    if (cmdLine.hasOption('h')) {
      printUsage(options, ss.out);
      System.exit(0);
    }

    if (execString != null && fileName != null) {
      ss.err.println("The '-e' and '-f' options cannot be specified simultaneously");
      printUsage(options, ss.err);
      System.exit(1);
    }

    // -p
    String perms = cmdLine.getOptionValue('p');
    if (perms != null) {
      validatePermissions(ss, conf, perms);
    }

    // -g
    String grp = cmdLine.getOptionValue('g');
    if (grp != null) {
      conf.set(HCatConstants.HCAT_GROUP, grp);
    }

    // -D
    setConfProperties(conf, cmdLine.getOptionProperties("D"));
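setConfProperties is called above but not included in the excerpt; a plausible sketch of such a helper, treated as hypothetical:

    // Hypothetical helper: copy each -Dkey=value pair from the command line into the conf.
    private static void setConfProperties(HiveConf conf, Properties props) {
      for (Map.Entry<Object, Object> e : props.entrySet()) {
        conf.set((String) e.getKey(), (String) e.getValue());
      }
    }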

  }

  public static HiveConf getHiveConf(Configuration conf)
    throws IOException {

    HiveConf hiveConf = new HiveConf(conf, HCatUtil.class);

    //copy the hive conf into the job conf and restore it
    //in the backend context
    if (conf.get(HCatConstants.HCAT_KEY_HIVE_CONF) == null) {
      conf.set(HCatConstants.HCAT_KEY_HIVE_CONF,
        HCatUtil.serialize(hiveConf.getAllProperties()));
    } else {
      //Copy configuration properties into the hive conf
      Properties properties = (Properties) HCatUtil.deserialize(
        conf.get(HCatConstants.HCAT_KEY_HIVE_CONF));

      for (Map.Entry<Object, Object> prop : properties.entrySet()) {
        if (prop.getValue() instanceof String) {
          hiveConf.set((String) prop.getKey(), (String) prop.getValue());
        } else if (prop.getValue() instanceof Integer) {
          hiveConf.setInt((String) prop.getKey(),
            (Integer) prop.getValue());
        } else if (prop.getValue() instanceof Boolean) {
          hiveConf.setBoolean((String) prop.getKey(),
            (Boolean) prop.getValue());
        } else if (prop.getValue() instanceof Long) {
          hiveConf.setLong((String) prop.getKey(), (Long) prop.getValue());
        } else if (prop.getValue() instanceof Float) {
          hiveConf.setFloat((String) prop.getKey(),
            (Float) prop.getValue());
        }
      }
    }

    if (conf.get(HCatConstants.HCAT_KEY_TOKEN_SIGNATURE) != null) {
      hiveConf.set("hive.metastore.token.signature",
        conf.get(HCatConstants.HCAT_KEY_TOKEN_SIGNATURE));
    }

    return hiveConf;
  }
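A short usage note: the first call stashes the serialized Hive properties into the job configuration under HCAT_KEY_HIVE_CONF, and any later call on the same configuration (for example, on the backend) restores them:

    Configuration jobConf = new Configuration();
    HiveConf frontendConf = HCatUtil.getHiveConf(jobConf); // serializes hive props into jobConf
    HiveConf backendConf = HCatUtil.getHiveConf(jobConf);  // restores them from the serialized copy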

    return job.getConfiguration().get(HCatConstants.HCAT_METASTORE_PRINCIPAL);
  }

  private static HiveMetaStoreClient getHiveMetaClient(String serverUri,
                             String serverKerberosPrincipal, Class<?> clazz) throws Exception {
    HiveConf hiveConf = new HiveConf(clazz);

    if (serverUri != null) {
      hiveConf.set("hive.metastore.local", "false");
      hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, serverUri.trim());
    }

    if (serverKerberosPrincipal != null) {
      hiveConf.setBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL, true);
      hiveConf.setVar(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL, serverKerberosPrincipal);
    }

    try {
      return HCatUtil.getHiveClient(hiveConf);
    } catch (Exception e) {
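A hedged example of calling this helper for a secured remote metastore; the URI, principal, and caller class are illustrative only:

    // Illustrative call site: remote metastore with Kerberos/SASL enabled.
    HiveMetaStoreClient client = getHiveMetaClient(
        "thrift://metastore.example.com:9083",  // assumed metastore URI
        "hive/_HOST@EXAMPLE.COM",               // assumed Kerberos principal
        MyLoader.class);                        // hypothetical caller class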

    // by listening on a topic named "HCAT" and message selector string
    // as "HCAT_EVENT = HCAT_ADD_TABLE"
    if (tableEvent.getStatus()) {
      Table tbl = tableEvent.getTable();
      HMSHandler handler = tableEvent.getHandler();
      HiveConf conf = handler.getHiveConf();
      Table newTbl;
      try {
        newTbl = handler.get_table(tbl.getDbName(), tbl.getTableName())
          .deepCopy();
        newTbl.getParameters().put(
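The excerpt stops mid-statement; going by the surrounding comment, the listener appears to stamp the new table with a messaging-topic parameter and save it back. A speculative sketch of that idea, with the parameter key and topic naming invented for illustration:

    // Speculative: record a topic name on the table so later events can be published to it.
    newTbl.getParameters().put(
        "hcat.msgbus.topic.name",                            // assumed parameter key
        topicPrefix + "." + tbl.getDbName().toLowerCase());  // hypothetical topic naming
    handler.alter_table(tbl.getDbName(), tbl.getTableName(), newTbl);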

  @Override
  public void checkOutputSpecs(JobContext context) throws IOException, InterruptedException {
    OutputJobInfo jobInfo = HCatOutputFormat.getJobInfo(context);
    HiveMetaStoreClient client = null;
    try {
      HiveConf hiveConf = HCatUtil.getHiveConf(context.getConfiguration());
      client = HCatUtil.getHiveClient(hiveConf);
      handleDuplicatePublish(context,
        jobInfo,
        client,
        new Table(jobInfo.getTableInfo().getTable()));
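The excerpt omits the cleanup path; the usual pattern is to release the client in a finally block. A sketch under that assumption:

    HiveMetaStoreClient client = null;
    try {
      HiveConf hiveConf = HCatUtil.getHiveConf(context.getConfiguration());
      client = HCatUtil.getHiveClient(hiveConf);
      // ... validate the output spec against the metastore ...
    } finally {
      if (client != null) {
        client.close(); // always release the Thrift connection
      }
    }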


  private static PigServer server;
  private static String[] input;
  private static HiveConf hiveConf;

  public void Initialize() throws Exception {
    hiveConf = new HiveConf(this.getClass());
    hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
    hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
    hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, TEST_WAREHOUSE_DIR);
    driver = new Driver(hiveConf);
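A hedged sketch of how such a test might use the Driver and PigServer together; the table name, schema, and loader class are illustrative:

    // Hypothetical test body: create a table with the Driver, then read it back from Pig.
    driver.run("create table junit_pigtest(a int, b string) stored as textfile");
    server.registerQuery(
        "A = load 'junit_pigtest' using org.apache.hcatalog.pig.HCatLoader();");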

