Package org.apache.hadoop.hive.ql.session

Examples of org.apache.hadoop.hive.ql.session.SessionState

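Type-checking a comparison expression: SessionState.get() supplies the active session's Configuration (falling back to a fresh Configuration when no session is bound) for the strict-mode checks on bigint/string and bigint/double comparisons, and the session then registers any JAR and FILE resources the UDF declares.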

    if (genericUDF instanceof GenericUDFBaseCompare && children.size() == 2) {

      TypeInfo oiTypeInfo0 = children.get(0).getTypeInfo();
      TypeInfo oiTypeInfo1 = children.get(1).getTypeInfo();

      SessionState ss = SessionState.get();
      Configuration conf = (ss != null) ? ss.getConf() : new Configuration();

      LogHelper console = new LogHelper(LOG);

      // In strict mode, comparing a bigint with a string or a double is an error;
      // otherwise emit a warning about the possible loss of precision.
      if ((oiTypeInfo0.equals(TypeInfoFactory.stringTypeInfo) && oiTypeInfo1.equals(TypeInfoFactory.longTypeInfo)) ||
          (oiTypeInfo0.equals(TypeInfoFactory.longTypeInfo) && oiTypeInfo1.equals(TypeInfoFactory.stringTypeInfo))) {
        if (HiveConf.getVar(conf, HiveConf.ConfVars.HIVEMAPREDMODE).equalsIgnoreCase("strict")) {
          throw new UDFArgumentException(ErrorMsg.NO_COMPARE_BIGINT_STRING.getMsg());
        } else {
          console.printError("WARNING: Comparing a bigint and a string may result in a loss of precision.");
        }
      } else if ((oiTypeInfo0.equals(TypeInfoFactory.doubleTypeInfo) && oiTypeInfo1.equals(TypeInfoFactory.longTypeInfo)) ||
          (oiTypeInfo0.equals(TypeInfoFactory.longTypeInfo) && oiTypeInfo1.equals(TypeInfoFactory.doubleTypeInfo))) {
        if (HiveConf.getVar(conf, HiveConf.ConfVars.HIVEMAPREDMODE).equalsIgnoreCase("strict")) {
          throw new UDFArgumentException(ErrorMsg.NO_COMPARE_BIGINT_DOUBLE.getMsg());
        } else {
          console.printError("WARNING: Comparing a bigint and a double may result in a loss of precision.");
        }
      }
    }

    ObjectInspector oi = genericUDF.initializeAndFoldConstants(childrenOIs);

    String[] requiredJars = genericUDF.getRequiredJars();
    String[] requiredFiles = genericUDF.getRequiredFiles();
    // Register any resources the UDF declares with the current session;
    // ss is assumed non-null here because UDF resolution runs inside a session.
    SessionState ss = SessionState.get();

    if (requiredJars != null) {
      SessionState.ResourceType t = SessionState.find_resource_type("JAR");
      for (String jarPath : requiredJars) {
        ss.add_resource(t, jarPath);
      }
    }

    if (requiredFiles != null) {
      SessionState.ResourceType t = SessionState.find_resource_type("FILE");
      for (String filePath : requiredFiles) {
        ss.add_resource(t, filePath);
      }
    }

    return new ExprNodeGenericFuncDesc(oi, genericUDF, children);
  }

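Attaching the session's automatic create-table grants: when a session is bound and carries a CreateTableAutomaticGrant, its user, group, and role grants become the privileges of the newly created table.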

      if (tbl.getParameters() != null) {
        tbl.getParameters().remove(hive_metastoreConstants.DDL_TIME);
      }
      org.apache.hadoop.hive.metastore.api.Table tTbl = tbl.getTTable();
      PrincipalPrivilegeSet principalPrivs = new PrincipalPrivilegeSet();
      SessionState ss = SessionState.get();
      if (ss != null) {
        CreateTableAutomaticGrant grants = ss.getCreateTableGrants();
        if (grants != null) {
          principalPrivs.setUserPrivileges(grants.getUserGrants());
          principalPrivs.setGroupPrivileges(grants.getGroupGrants());
          principalPrivs.setRolePrivileges(grants.getRoleGrants());
          tTbl.setPrivileges(principalPrivs);
        }
      }

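Resolving the current user name through the session's authenticator, returning null when no session or no authenticator is available.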

  private String getUserName() {
    SessionState ss = SessionState.get();
    if (ss != null && ss.getAuthenticator() != null) {
      return ss.getAuthenticator().getUserName();
    }
    return null;
  }

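The same null-safe pattern, applied to group names.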

  private List<String> getGroupNames() {
    SessionState ss = SessionState.get();
    if (ss != null && ss.getAuthenticator() != null) {
      return ss.getAuthenticator().getGroupNames();
    }
    return null;
  }

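Collecting the session's registered resources of a given type for the task environment; the tail of the method (validating each file and joining the results) is restored here following the upstream implementation.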

  public static String getResourceFiles(Configuration conf, SessionState.ResourceType t) {
    // fill in local files to be added to the task environment
    SessionState ss = SessionState.get();
    Set<String> files = (ss == null) ? null : ss.list_resource(t, null);
    if (files != null) {
      List<String> realFiles = new ArrayList<String>(files.size());
      for (String one : files) {
        try {
          realFiles.add(realFile(one, conf));
        } catch (IOException e) {
          throw new RuntimeException("Cannot validate file " + one
              + " due to exception: " + e.getMessage(), e);
        }
      }
      return StringUtils.join(realFiles, ",");
    } else {
      return "";
    }
  }

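Saving the current session's operation and command into a QueryState holder before a nested command runs...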

  public void saveSession(QueryState qs) {
    SessionState oldss = SessionState.get();
    if (oldss != null && oldss.getHiveOperation() != null) {
      qs.init(oldss.getHiveOperation(), oldss.getCmd());
    }
  }

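...and restoring them afterwards. A short usage sketch follows the snippet.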

  public void restoreSession(QueryState qs) {
    SessionState ss = SessionState.get();
    if (ss != null && qs != null && qs.isInitialized()) {
      ss.setCmd(qs.getCmd());
      ss.setCommandType(qs.getOp());
    }
  }
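A minimal usage sketch for this save/restore pair (the no-argument QueryState construction is an assumption; the holder type comes from the excerpts above):

    QueryState qs = new QueryState();  // assumed constructor for the holder type
    saveSession(qs);                   // capture the outer command's operation/cmd
    try {
      // run a nested command that overwrites the session's cmd and operation
    } finally {
      restoreSession(qs);              // put the outer command's state back
    }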

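Driver-style authorization, the largest example: the session provides the current HiveOperation, the authorizer, and the current database. Outputs are authorized at table or partition granularity; inputs are authorized per column where pruning information is available, with partition-level checks for tables that enable PARTITION_LEVEL_PRIVILEGE.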
  private void doAuthorization(BaseSemanticAnalyzer sem)
    throws HiveException, AuthorizationException {
    HashSet<ReadEntity> inputs = sem.getInputs();
    HashSet<WriteEntity> outputs = sem.getOutputs();
    SessionState ss = SessionState.get();
    HiveOperation op = ss.getHiveOperation();
    Hive db = sem.getDb();
    if (op != null) {
      if (op.equals(HiveOperation.CREATETABLE_AS_SELECT)
          || op.equals(HiveOperation.CREATETABLE)) {
        ss.getAuthorizer().authorize(
            db.getDatabase(SessionState.get().getCurrentDatabase()), null,
            HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
      } else {
        if (op.equals(HiveOperation.IMPORT)) {
          ImportSemanticAnalyzer isa = (ImportSemanticAnalyzer) sem;
          if (!isa.existsTable()) {
            ss.getAuthorizer().authorize(
                db.getDatabase(SessionState.get().getCurrentDatabase()), null,
                HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
          }
        }
      }
      if (outputs != null && outputs.size() > 0) {
        for (WriteEntity write : outputs) {

          if (write.getType() == WriteEntity.Type.PARTITION) {
            Partition part = db.getPartition(write.getTable(), write
                .getPartition().getSpec(), false);
            if (part != null) {
              ss.getAuthorizer().authorize(write.getPartition(), null,
                      op.getOutputRequiredPrivileges());
              continue;
            }
          }

          if (write.getTable() != null) {
            ss.getAuthorizer().authorize(write.getTable(), null,
                    op.getOutputRequiredPrivileges());
          }
        }

      }
    }

    if (inputs != null && inputs.size() > 0) {

      Map<Table, List<String>> tab2Cols = new HashMap<Table, List<String>>();
      Map<Partition, List<String>> part2Cols = new HashMap<Partition, List<String>>();

      Map<String, Boolean> tableUsePartLevelAuth = new HashMap<String, Boolean>();
      for (ReadEntity read : inputs) {
        Table tbl = read.getTable();
        if ((read.getPartition() != null) || (tbl.isPartitioned())) {
          String tblName = tbl.getTableName();
          if (tableUsePartLevelAuth.get(tblName) == null) {
            // "TRUE".equalsIgnoreCase(null) is false, so a missing parameter means
            // table-level authorization. Boolean.valueOf returns the canonical
            // Boolean.TRUE/FALSE, so the identity checks below still hold.
            String partLevelPriv = tbl.getParameters().get("PARTITION_LEVEL_PRIVILEGE");
            tableUsePartLevelAuth.put(tblName,
                Boolean.valueOf("TRUE".equalsIgnoreCase(partLevelPriv)));
          }
        }
      }

      if (op.equals(HiveOperation.CREATETABLE_AS_SELECT)
          || op.equals(HiveOperation.QUERY)) {
        SemanticAnalyzer querySem = (SemanticAnalyzer) sem;
        ParseContext parseCtx = querySem.getParseContext();
        Map<TableScanOperator, Table> tsoTopMap = parseCtx.getTopToTable();

        for (Map.Entry<String, Operator<? extends OperatorDesc>> topOpMap : querySem
            .getParseContext().getTopOps().entrySet()) {
          Operator<? extends OperatorDesc> topOp = topOpMap.getValue();
          if (topOp instanceof TableScanOperator
              && tsoTopMap.containsKey(topOp)) {
            TableScanOperator tableScanOp = (TableScanOperator) topOp;
            Table tbl = tsoTopMap.get(tableScanOp);
            List<Integer> neededColumnIds = tableScanOp.getNeededColumnIDs();
            List<FieldSchema> columns = tbl.getCols();
            List<String> cols = new ArrayList<String>();
            if (neededColumnIds != null && neededColumnIds.size() > 0) {
              for (int i = 0; i < neededColumnIds.size(); i++) {
                cols.add(columns.get(neededColumnIds.get(i)).getName());
              }
            } else {
              for (int i = 0; i < columns.size(); i++) {
                cols.add(columns.get(i).getName());
              }
            }
            // The map may not contain all sources, since the input list may have been
            // optimized out or be non-existent, though such sources may still be
            // referenced by the TableScanOperator. If the part-level-auth lookup is
            // null, the partition probably doesn't exist, so fall back to table-level
            // permissions.
            if (tbl.isPartitioned() &&
                tableUsePartLevelAuth.get(tbl.getTableName()) == Boolean.TRUE) {
              String alias_id = topOpMap.getKey();

              PrunedPartitionList partsList = PartitionPruner.prune(tableScanOp,
                  parseCtx, alias_id);
              Set<Partition> parts = partsList.getPartitions();
              for (Partition part : parts) {
                List<String> existingCols = part2Cols.get(part);
                if (existingCols == null) {
                  existingCols = new ArrayList<String>();
                }
                existingCols.addAll(cols);
                part2Cols.put(part, existingCols);
              }
            } else {
              List<String> existingCols = tab2Cols.get(tbl);
              if (existingCols == null) {
                existingCols = new ArrayList<String>();
              }
              existingCols.addAll(cols);
              tab2Cols.put(tbl, existingCols);
            }
          }
        }
      }

      // cache the results for table authorization
      Set<String> tableAuthChecked = new HashSet<String>();
      for (ReadEntity read : inputs) {
        Table tbl = read.getTable();
        if (read.getPartition() != null) {
          Partition partition = read.getPartition();
          tbl = partition.getTable();
          // use partition level authorization
          if (tableUsePartLevelAuth.get(tbl.getTableName()) == Boolean.TRUE) {
            List<String> cols = part2Cols.get(partition);
            if (cols != null && cols.size() > 0) {
              ss.getAuthorizer().authorize(partition.getTable(),
                  partition, cols, op.getInputRequiredPrivileges(),
                  null);
            } else {
              ss.getAuthorizer().authorize(partition,
                  op.getInputRequiredPrivileges(), null);
            }
            continue;
          }
        }

        // If we reach here, a table-level authorization check is needed; it may
        // already have happened while processing another partition of the same
        // table, hence the tableAuthChecked cache.
        if (tbl != null && !tableAuthChecked.contains(tbl.getTableName()) &&
            !(tableUsePartLevelAuth.get(tbl.getTableName()) == Boolean.TRUE)) {
          List<String> cols = tab2Cols.get(tbl);
          if (cols != null && cols.size() > 0) {
            ss.getAuthorizer().authorize(tbl, null, cols,
                op.getInputRequiredPrivileges(), null);
          } else {
            ss.getAuthorizer().authorize(tbl, op.getInputRequiredPrivileges(),
                null);
          }
          tableAuthChecked.add(tbl.getTableName());
        }
      }
    }
  }

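Reporting a failed command: when the session's configured MetaDataFormatter is the JSON formatter, the error is encoded in machine-readable form instead of plain text.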
  public CommandProcessorResponse run(String command) throws CommandNeedRetryException {
    CommandProcessorResponse cpr = runInternal(command);
    if (cpr.getResponseCode() == 0) {
      return cpr;
    }
    SessionState ss = SessionState.get();
    if (ss == null) {
      return cpr;
    }
    MetaDataFormatter mdf = MetaDataFormatUtils.getFormatter(ss.getConf());
    if (!(mdf instanceof JsonMetaDataFormatter)) {
      return cpr;
    }
    /* Here we want to encode the error in a machine-readable way (e.g. JSON).
     * Ideally, errorCode would always be set to a canonical error defined in ErrorMsg.
     * (The remainder of the method, elided here, performs that encoding.) */

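CliDriver's constructor shows the common fallback: use the session's Configuration when a session is bound, otherwise start from a fresh Configuration.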
  private final LogHelper console;
  private Configuration conf;

  public CliDriver() {
    SessionState ss = SessionState.get();
    conf = (ss != null) ? ss.getConf() : new Configuration();
    Log LOG = LogFactory.getLog("CliDriver");
    console = new LogHelper(LOG);
  }

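All of the examples above assume a session has been bound to the current thread; otherwise SessionState.get() returns null. A minimal bootstrap sketch using the standard SessionState/HiveConf API (the strict-mode setting simply exercises the comparison checks in the first example; the exact setup varies across Hive versions):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.session.SessionState;

    public class SessionBootstrap {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf(SessionState.class);
        // Strict mode turns the bigint comparison warnings in the first
        // example into errors.
        conf.setVar(HiveConf.ConfVars.HIVEMAPREDMODE, "strict");
        SessionState.start(new SessionState(conf));
        // From here on, SessionState.get() returns the session bound above.
        System.out.println(SessionState.get() != null);  // prints: true
      }
    }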