Package org.apache.hadoop.hbase.security

Examples of org.apache.hadoop.hbase.security.UserProvider
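
Most of the snippets below follow the same pattern: instantiate a UserProvider from a Configuration, check whether Hadoop and HBase security are enabled, log a principal in from a keytab if so, and then resolve the current User. The stand-alone sketch below is a minimal illustration of that pattern, not code taken from HBase itself; the configuration keys ("example.keytab.file", "example.kerberos.principal"), the host name, and the class name are placeholders.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;

public class UserProviderSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    UserProvider userProvider = UserProvider.instantiate(conf);

    // Only attempt a Kerberos login when both Hadoop and HBase security are enabled.
    if (userProvider.isHadoopSecurityEnabled() && userProvider.isHBaseSecurityEnabled()) {
      // Placeholder keys and host name; real callers pass service-specific keys such as
      // hbase.master.keytab.file / hbase.master.kerberos.principal and the local host.
      userProvider.login("example.keytab.file", "example.kerberos.principal",
          "host.example.com");
    }

    // The current user is what token handling and authorization checks operate on.
    User user = userProvider.getCurrent();
    System.out.println("Running as " + (user == null ? "<unknown>" : user.getName()));
  }
}

The real call sites below use the same three-argument login, each with its own keytab and principal keys (master, REST, Thrift, client).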


  /**
   * Returns a new {@code PermissionCache} initialized with permission assignments
   * from the {@code hbase.superuser} configuration key.
   */
  private PermissionCache<Permission> initGlobal(Configuration conf) throws IOException {
    UserProvider userProvider = UserProvider.instantiate(conf);
    User user = userProvider.getCurrent();
    if (user == null) {
      throw new IOException("Unable to obtain the current user, " +
          "authorization checks for internal operations will not work correctly!");
    }
    PermissionCache<Permission> newCache = new PermissionCache<Permission>();
    // ...


    }
    initCredentials(job);
  }

  public static void initCredentials(JobConf job) throws IOException {
    UserProvider userProvider = UserProvider.instantiate(job);
    if (userProvider.isHadoopSecurityEnabled()) {
      // propagate delegation related props from launcher job to MR job
      if (System.getenv("HADOOP_TOKEN_FILE_LOCATION") != null) {
        job.set("mapreduce.job.credentials.binary", System.getenv("HADOOP_TOKEN_FILE_LOCATION"));
      }
    }

    if (userProvider.isHBaseSecurityEnabled()) {
      try {
        // login the server principal (if using secure Hadoop)
        User user = userProvider.getCurrent();
        Token<AuthenticationTokenIdentifier> authToken = getAuthToken(job, user);
        if (authToken == null) {
          user.obtainAuthTokenForJob(job);
        } else {
          job.getCredentials().addToken(authToken.getService(), authToken);
          // ...

    // login the zookeeper client principal (if using security)
    ZKUtil.loginClient(this.conf, "hbase.zookeeper.client.keytab.file",
      "hbase.zookeeper.client.kerberos.principal", this.isa.getHostName());

    // initialize server principal (if using secure Hadoop)
    UserProvider provider = UserProvider.instantiate(conf);
    provider.login("hbase.master.keytab.file",
      "hbase.master.kerberos.principal", this.isa.getHostName());

    LOG.info("hbase.rootdir=" + FSUtils.getRootDir(this.conf) +
        ", hbase.cluster.distributed=" + this.conf.getBoolean("hbase.cluster.distributed", false));
    // ...

      initCredentials(job);
    }
  }

  public static void initCredentials(Job job) throws IOException {
    UserProvider userProvider = UserProvider.instantiate(job.getConfiguration());
    if (userProvider.isHadoopSecurityEnabled()) {
      // propagate delegation related props from launcher job to MR job
      if (System.getenv("HADOOP_TOKEN_FILE_LOCATION") != null) {
        job.getConfiguration().set("mapreduce.job.credentials.binary",
                                   System.getenv("HADOOP_TOKEN_FILE_LOCATION"));
      }
    }

    if (userProvider.isHBaseSecurityEnabled()) {
      try {
        // init credentials for remote cluster
        String quorumAddress = job.getConfiguration().get(TableOutputFormat.QUORUM_ADDRESS);
        User user = userProvider.getCurrent();
        if (quorumAddress != null) {
          Configuration peerConf = HBaseConfiguration.create(job.getConfiguration());
          ZKUtil.applyClusterKeyToConf(peerConf, quorumAddress);
          obtainAuthTokenForJob(job, peerConf, user);
        }
        // ...
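
For illustration, a hedged sketch of a MapReduce driver that calls the Job-based initCredentials directly; the class and job names are invented, and it assumes the method shown above lives on org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil, as it does in stock HBase. Normally the initTable*Job helpers call it for you, as the first fragment of this example shows.

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.mapreduce.Job;

public class InitCredentialsSketch {
  public static void main(String[] args) throws Exception {
    Job job = Job.getInstance(HBaseConfiguration.create(), "hbase-scan-job");
    // Obtains an HBase authentication token for the submitting user (or reuses an
    // existing one) and adds it to the job's credentials.
    TableMapReduceUtil.initCredentials(job);
    // ... set mapper/reducer, input and output formats, then submit the job.
  }
}
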

   * @param quorumAddress string that contains the 3 required configurations
   * @throws IOException When the authentication token cannot be obtained.
   */
  public static void initCredentialsForCluster(Job job, String quorumAddress)
      throws IOException {
    UserProvider userProvider = UserProvider.instantiate(job.getConfiguration());
    if (userProvider.isHBaseSecurityEnabled()) {
      try {
        Configuration peerConf = HBaseConfiguration.create(job.getConfiguration());
        ZKUtil.applyClusterKeyToConf(peerConf, quorumAddress);
        obtainAuthTokenForJob(job, peerConf, userProvider.getCurrent());
      } catch (InterruptedException e) {
        LOG.info("Interrupted obtaining user authentication token");
        Thread.interrupted();
      }
    }
  }
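
A hedged sketch of how a driver might use initCredentialsForCluster before submitting a job that writes to a second, remote cluster. The quorum string is a placeholder in the usual cluster-key form (quorum hosts, client port, parent znode), the class and job names are invented, and it is assumed the method lives on org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil as in stock HBase.

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.mapreduce.Job;

public class RemoteClusterCredentialsSketch {
  public static void main(String[] args) throws Exception {
    Job job = Job.getInstance(HBaseConfiguration.create(), "copy-to-peer-cluster");
    // Placeholder cluster key for the remote cluster: quorum:clientPort:znodeParent.
    String peerQuorum = "zk1.example.com,zk2.example.com,zk3.example.com:2181:/hbase";
    // Obtains an authentication token against the peer cluster and adds it to the
    // job's credentials so tasks can authenticate there.
    TableMapReduceUtil.initCredentialsForCluster(job, peerQuorum);
    // ... configure the output format for the peer cluster and submit the job.
  }
}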

    VersionInfo.logVersion();
    FilterHolder authFilter = null;
    Configuration conf = HBaseConfiguration.create();
    Class<? extends ServletContainer> containerClass = ServletContainer.class;
    UserProvider userProvider = UserProvider.instantiate(conf);
    // login the server principal (if using secure Hadoop)
    if (userProvider.isHadoopSecurityEnabled() && userProvider.isHBaseSecurityEnabled()) {
      String machineName = Strings.domainNamePointerToHostName(
        DNS.getDefaultHost(conf.get(REST_DNS_INTERFACE, "default"),
          conf.get(REST_DNS_NAMESERVER, "default")));
      String keytabFilename = conf.get(REST_KEYTAB_FILE);
      Preconditions.checkArgument(keytabFilename != null && !keytabFilename.isEmpty(),
        REST_KEYTAB_FILE + " should be set if security is enabled");
      String principalConfig = conf.get(REST_KERBEROS_PRINCIPAL);
      Preconditions.checkArgument(principalConfig != null && !principalConfig.isEmpty(),
        REST_KERBEROS_PRINCIPAL + " should be set if security is enabled");
      userProvider.login(REST_KEYTAB_FILE, REST_KERBEROS_PRINCIPAL, machineName);
      if (conf.get(REST_AUTHENTICATION_TYPE) != null) {
        containerClass = RESTServletContainer.class;
        authFilter = new FilterHolder();
        authFilter.setClassName(AuthFilter.class.getName());
        authFilter.setName("AuthenticationFilter");
        // ...

  private static final Log LOG = LogFactory.getLog(AuthUtil.class);
  /**
   * Checks if security is enabled and, if so, launches a chore that refreshes the Kerberos ticket.
   */
  public static void launchAuthChore(Configuration conf) throws IOException {
    UserProvider userProvider = UserProvider.instantiate(conf);
    // login the principal (if using secure Hadoop)
    boolean securityEnabled =
        userProvider.isHadoopSecurityEnabled() && userProvider.isHBaseSecurityEnabled();
    if (!securityEnabled) return;
    String host = null;
    try {
      host = Strings.domainNamePointerToHostName(DNS.getDefaultHost(
          conf.get("hbase.client.dns.interface", "default"),
          conf.get("hbase.client.dns.nameserver", "default")));
      userProvider.login("hbase.client.keytab.file", "hbase.client.kerberos.principal", host);
    } catch (UnknownHostException e) {
      LOG.error("Error resolving host name");
      throw e;
    } catch (IOException e) {
      LOG.error("Error while trying to perform the initial login");
      throw e;
    }

    final UserGroupInformation ugi = userProvider.getCurrent().getUGI();
    Stoppable stoppable = new Stoppable() {
      private volatile boolean isStopped = false;

      @Override
      public void stop(String why) {
        // ...
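
A hedged sketch of how a long-running client process might drive launchAuthChore: set the same hbase.client.keytab.file and hbase.client.kerberos.principal keys the method reads, then call it once at startup. The keytab path and principal are placeholders, and the sketch assumes AuthUtil lives in org.apache.hadoop.hbase, as it does in stock HBase.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class AuthChoreSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Placeholder keytab and principal; the keys match the ones read by launchAuthChore.
    conf.set("hbase.client.keytab.file", "/etc/security/keytabs/hbase-client.keytab");
    conf.set("hbase.client.kerberos.principal", "hbase-client/_HOST@EXAMPLE.COM");
    AuthUtil.launchAuthChore(conf); // returns immediately if security is not enabled
    // ... create connections and do client work; the chore keeps the ticket renewed.
  }
}
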

    }

  }

  public ThriftServerRunner(Configuration conf) throws IOException {
    UserProvider userProvider = UserProvider.instantiate(conf);
    // login the server principal (if using secure Hadoop)
    boolean securityEnabled = userProvider.isHadoopSecurityEnabled()
      && userProvider.isHBaseSecurityEnabled();
    if (securityEnabled) {
      host = Strings.domainNamePointerToHostName(DNS.getDefaultHost(
        conf.get("hbase.thrift.dns.interface", "default"),
        conf.get("hbase.thrift.dns.nameserver", "default")));
      userProvider.login("hbase.thrift.keytab.file",
        "hbase.thrift.kerberos.principal", host);
    }
    this.conf = HBaseConfiguration.create(conf);
    this.listenPort = conf.getInt(PORT_CONF_KEY, DEFAULT_LISTEN_PORT);
    this.metrics = new ThriftMetrics(conf, ThriftMetrics.ThriftServerType.ONE);
    this.hbaseHandler = new HBaseHandler(conf, userProvider);
    this.hbaseHandler.initMetrics(metrics);
    this.handler = HbaseHandlerMetricsProxy.newInstance(
      hbaseHandler, metrics, conf);
    this.realUser = userProvider.getCurrent().getUGI();
    qop = conf.get(THRIFT_QOP_KEY);
    if (qop != null) {
      if (!qop.equals("auth") && !qop.equals("auth-int")
          && !qop.equals("auth-conf")) {
        throw new IOException("Invalid " + THRIFT_QOP_KEY + ": " + qop
            // ...

      }
    }
    this.properties = Collections.unmodifiableMap(m);

    try {
      UserProvider provider = UserProvider.instantiate(conf);
      User currentUser = provider.getCurrent();
      if (currentUser != null) {
        username = currentUser.getName();
      }
    } catch (IOException ioe) {
      ConnectionManager.LOG.warn(
          // ...

   * @return Connection object for <code>conf</code>
   */
  public static Connection createConnection(Configuration conf, ExecutorService pool, User user)
  throws IOException {
    if (user == null) {
      UserProvider provider = UserProvider.instantiate(conf);
      user = provider.getCurrent();
    }

    return createConnection(conf, false, pool, user);
  }
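
When a caller does not supply a User, the fallback above is UserProvider.getCurrent(). A hedged sketch of what that looks like from application code, assuming the public ConnectionFactory entry point of HBase 1.0+; the class name is invented.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class CurrentUserConnectionSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // No explicit User: the connection ends up running as UserProvider.getCurrent().
    try (Connection connection = ConnectionFactory.createConnection(conf)) {
      System.out.println("Connected as the current user");
    }
  }
}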
