Package org.apache.hadoop.hive.ql.session

Examples of org.apache.hadoop.hive.ql.session.SessionState
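
The snippets below come from the Hive codebase (Driver, CliDriver, SetProcessor, hooks, and tests) and share one access pattern: a session is started once for the current thread, then fetched anywhere via the static SessionState.get(). A minimal sketch, assuming only the standard APIs the snippets themselves use:

  import org.apache.hadoop.hive.cli.CliSessionState;
  import org.apache.hadoop.hive.conf.HiveConf;
  import org.apache.hadoop.hive.ql.session.SessionState;

  public class SessionStateExample {
    public static void main(String[] args) throws Exception {
      // Start a session for this thread; CliSessionState is the CLI flavor.
      HiveConf conf = new HiveConf(SessionState.class);
      SessionState.start(new CliSessionState(conf));

      // Anywhere on the same thread, the session is one static call away.
      SessionState ss = SessionState.get();
      System.out.println("silent = " + ss.getIsSilent());
    }
  }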


    public String getCmd() {
      return this.cmd;
    }
  }

  public void saveSession(QueryState qs) {
    SessionState oldss = SessionState.get();
    if (oldss != null && oldss.getHiveOperation() != null) {
      qs.init(oldss.getHiveOperation(), oldss.getCmd());
    }
  }

  public void restoreSession(QueryState qs) {
    SessionState ss = SessionState.get();
    if (ss != null && qs != null && qs.isInitialized()) {
      ss.setCmd(qs.getCmd());
      ss.setCommandType(qs.getOp());
    }
  }
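
Driver pairs these two methods around nested command execution: the current operation and command string are snapshotted into a QueryState before another command runs, then put back afterwards. A minimal sketch of the calling pattern (the try/finally shape is an assumption, not a verbatim excerpt):

    QueryState queryState = new QueryState();
    saveSession(queryState);            // snapshot operation + command
    try {
      // ... compile/run a nested command that overwrites the SessionState ...
    } finally {
      restoreSession(queryState);       // put the snapshot back
    }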

  private void doAuthorization(BaseSemanticAnalyzer sem)
    throws HiveException, AuthorizationException {
    HashSet<ReadEntity> inputs = sem.getInputs();
    HashSet<WriteEntity> outputs = sem.getOutputs();
    SessionState ss = SessionState.get();
    HiveOperation op = ss.getHiveOperation();
    Hive db = sem.getDb();
    if (op != null) {
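      // CREATE TABLE [AS SELECT], and IMPORT into a table that does not yet
      // exist, are authorized against the current database as a whole.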
      if (op.equals(HiveOperation.CREATETABLE_AS_SELECT)
          || op.equals(HiveOperation.CREATETABLE)) {
        ss.getAuthorizer().authorize(
            db.getDatabase(db.getCurrentDatabase()), null,
            HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
      } else {
        if (op.equals(HiveOperation.IMPORT)) {
          ImportSemanticAnalyzer isa = (ImportSemanticAnalyzer) sem;
          if (!isa.existsTable()) {
            ss.getAuthorizer().authorize(
                db.getDatabase(db.getCurrentDatabase()), null,
                HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
          }
        }
      }
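      // Each remaining write target is authorized individually, preferring
      // the concrete partition over its table when the partition exists.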
      if (outputs != null && outputs.size() > 0) {
        for (WriteEntity write : outputs) {

          if (write.getType() == WriteEntity.Type.PARTITION) {
            Partition part = db.getPartition(write.getTable(), write
                .getPartition().getSpec(), false);
            if (part != null) {
              ss.getAuthorizer().authorize(write.getPartition(), null,
                      op.getOutputRequiredPrivileges());
              continue;
            }
          }

          if (write.getTable() != null) {
            ss.getAuthorizer().authorize(write.getTable(), null,
                    op.getOutputRequiredPrivileges());
          }
        }

      }
    }

    if (inputs != null && inputs.size() > 0) {
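      // Input-side checks: collect column usage per table and partition, then
      // authorize each ReadEntity, honoring any per-table opt-in to
      // partition-level privileges (the PARTITION_LEVEL_PRIVILEGE parameter).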

      Map<Table, List<String>> tab2Cols = new HashMap<Table, List<String>>();
      Map<Partition, List<String>> part2Cols = new HashMap<Partition, List<String>>();

      Map<String, Boolean> tableUsePartLevelAuth = new HashMap<String, Boolean>();
      for (ReadEntity read : inputs) {
        if (read.getPartition() != null) {
          Table tbl = read.getTable();
          String tblName = tbl.getTableName();
          if (tableUsePartLevelAuth.get(tblName) == null) {
            boolean usePartLevelPriv = (tbl.getParameters().get(
                "PARTITION_LEVEL_PRIVILEGE") != null && ("TRUE"
                .equalsIgnoreCase(tbl.getParameters().get(
                    "PARTITION_LEVEL_PRIVILEGE"))));
            if (usePartLevelPriv) {
              tableUsePartLevelAuth.put(tblName, Boolean.TRUE);
            } else {
              tableUsePartLevelAuth.put(tblName, Boolean.FALSE);
            }
          }
        }
      }

      if (op.equals(HiveOperation.CREATETABLE_AS_SELECT)
          || op.equals(HiveOperation.QUERY)) {
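        // Walk the query's table scans to record the columns each scan
        // actually reads, enabling column-level authorization below.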
        SemanticAnalyzer querySem = (SemanticAnalyzer) sem;
        ParseContext parseCtx = querySem.getParseContext();
        Map<TableScanOperator, Table> tsoTopMap = parseCtx.getTopToTable();

        for (Map.Entry<String, Operator<? extends OperatorDesc>> topOpMap : querySem
            .getParseContext().getTopOps().entrySet()) {
          Operator<? extends OperatorDesc> topOp = topOpMap.getValue();
          if (topOp instanceof TableScanOperator
              && tsoTopMap.containsKey(topOp)) {
            TableScanOperator tableScanOp = (TableScanOperator) topOp;
            Table tbl = tsoTopMap.get(tableScanOp);
            List<Integer> neededColumnIds = tableScanOp.getNeededColumnIDs();
            List<FieldSchema> columns = tbl.getCols();
            List<String> cols = new ArrayList<String>();
            if (neededColumnIds != null && neededColumnIds.size() > 0) {
              for (int i = 0; i < neededColumnIds.size(); i++) {
                cols.add(columns.get(neededColumnIds.get(i)).getName());
              }
            } else {
              for (int i = 0; i < columns.size(); i++) {
                cols.add(columns.get(i).getName());
              }
            }
            // The map may not contain all sources: an input may have been
            // optimized out, or may be non-existent even though it is still
            // referenced by the TableScanOperator. If the lookup below returns
            // null, the partition probably does not exist, so fall back to
            // table-level permissions.
            if (tbl.isPartitioned() &&
                tableUsePartLevelAuth.get(tbl.getTableName()) == Boolean.TRUE) {
              String alias_id = topOpMap.getKey();
              PrunedPartitionList partsList = PartitionPruner.prune(parseCtx
                  .getTopToTable().get(topOp), parseCtx.getOpToPartPruner()
                  .get(topOp), parseCtx.getConf(), alias_id, parseCtx
                  .getPrunedPartitions());
              Set<Partition> parts = new HashSet<Partition>();
              parts.addAll(partsList.getConfirmedPartns());
              parts.addAll(partsList.getUnknownPartns());
              for (Partition part : parts) {
                List<String> existingCols = part2Cols.get(part);
                if (existingCols == null) {
                  existingCols = new ArrayList<String>();
                }
                existingCols.addAll(cols);
                part2Cols.put(part, existingCols);
              }
            } else {
              List<String> existingCols = tab2Cols.get(tbl);
              if (existingCols == null) {
                existingCols = new ArrayList<String>();
              }
              existingCols.addAll(cols);
              tab2Cols.put(tbl, existingCols);
            }
          }
        }
      }


      //cache the results for table authorization
      Set<String> tableAuthChecked = new HashSet<String>();
      for (ReadEntity read : inputs) {
        Table tbl = null;
        if (read.getPartition() != null) {
          tbl = read.getPartition().getTable();
          // use partition level authorization
          if (tableUsePartLevelAuth.get(tbl.getTableName()) == Boolean.TRUE) {
            List<String> cols = part2Cols.get(read.getPartition());
            if (cols != null && cols.size() > 0) {
              ss.getAuthorizer().authorize(read.getPartition().getTable(),
                  read.getPartition(), cols, op.getInputRequiredPrivileges(),
                  null);
            } else {
              ss.getAuthorizer().authorize(read.getPartition(),
                  op.getInputRequiredPrivileges(), null);
            }
            continue;
          }
        } else if (read.getTable() != null) {
          tbl = read.getTable();
        }

        // if we reach here, it means it needs to do a table authorization
        // check, and the table authorization may already happened because of other
        // partitions
        if (tbl != null && !tableAuthChecked.contains(tbl.getTableName())) {
          List<String> cols = tab2Cols.get(tbl);
          if (cols != null && cols.size() > 0) {
            ss.getAuthorizer().authorize(tbl, null, cols,
                op.getInputRequiredPrivileges(), null);
          } else {
            ss.getAuthorizer().authorize(tbl, op.getInputRequiredPrivileges(),
                null);
          }
          tableAuthChecked.add(tbl.getTableName());
        }
      }
    }
  }
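
The partition-level input path above is opt-in per table. A hedged sketch of flipping that switch through the standard Hive metadata API (the table name is hypothetical; the parameter key is the one doAuthorization reads):

    Hive db = Hive.get(new HiveConf(SessionState.class));
    Table tbl = db.getTable("web_logs");                  // hypothetical table
    tbl.getParameters().put("PARTITION_LEVEL_PRIVILEGE", "TRUE");
    db.alterTable("web_logs", tbl);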

  private final LogHelper console;
  private Configuration conf;

  public CliDriver() {
    SessionState ss = SessionState.get();
    conf = (ss != null) ? ss.getConf() : new Configuration();
    Log LOG = LogFactory.getLog("CliDriver");
    console = new LogHelper(LOG);
  }

          continue;
        }

        ret = processCmd(command);
        //wipe cli query state
        SessionState ss = SessionState.get();
        ss.setCommandType(null);
        command = "";
        lastRet = ret;
        boolean ignoreErrors = HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLIIGNOREERRORS);
        if (ret != 0 && !ignoreErrors) {
          CommandProcessorFactory.clean((HiveConf) conf);

/**
 * Post-execution hook used in tests to verify that, for a bucketed insert,
 * the number of reducers has been correctly set.
 */
public class VerifyNumReducersForBucketsHook implements ExecuteWithHookContext {

  public void run(HookContext hookContext) {
    SessionState ss = SessionState.get();
    Assert.assertNotNull("SessionState returned null", ss);

    List<MapRedStats> stats = ss.getLastMapRedStatsList();
    Assert.assertEquals("Number of MapReduce jobs is incorrect", 1, stats.size());

    Assert.assertEquals("NumReducers is incorrect", 10, stats.get(0).getNumReduce());
  }
}
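
Hooks implementing ExecuteWithHookContext are picked up from the hive.exec.post.hooks setting. A hedged sketch of wiring this hook into a test run (assuming the standard HiveConf.ConfVars.POSTEXECHOOKS entry):

    HiveConf conf = new HiveConf(SessionState.class);
    conf.setVar(HiveConf.ConfVars.POSTEXECHOOKS,
        VerifyNumReducersForBucketsHook.class.getName());
    SessionState.start(new CliSessionState(conf));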

    }
    try {
      String actualDir = parentTmpDir + "/test";
      HiveConf conf = new HiveConf(SessionState.class);
      conf.set(HiveConf.ConfVars.HIVEHISTORYFILELOC.toString(), actualDir);
      SessionState ss = new CliSessionState(conf);
      HiveHistory hiveHistory = new HiveHistory(ss);
      Path actualPath = new Path(actualDir);
      if (!fs.exists(actualPath)) {
        fail("Query location path is not exist :" + actualPath.toString());
      }

    throw new IllegalArgumentException(prefix + "'" + value
        + "' is not a boolean");
  }

  private void dumpOptions(Properties p) {
    SessionState ss = SessionState.get();
    SortedMap<String, String> sortedMap = new TreeMap<String, String>();
    sortedMap.put("silent", (ss.getIsSilent() ? "on" : "off"));
    for (Object one : p.keySet()) {
      String oneProp = (String) one;
      String oneValue = p.getProperty(oneProp);
      sortedMap.put(oneProp, oneValue);
    }

    // Inserting hive variables
    for (String s : ss.getHiveVariables().keySet()) {
      sortedMap.put(SetProcessor.HIVEVAR_PREFIX + s, ss.getHiveVariables().get(s));
    }

    for (Map.Entry<String, String> entries : sortedMap.entrySet()) {
      ss.out.println(entries.getKey() + "=" + entries.getValue());
    }
    }
  }

  private void dumpOption(String s) {
    SessionState ss = SessionState.get();

    if (ss.getConf().get(s) != null) {
      ss.out.println(s + "=" + ss.getConf().get(s));
    } else if (ss.getHiveVariables().containsKey(s)) {
      ss.out.println(s + "=" + ss.getHiveVariables().get(s));
    } else {
      ss.out.println(s + " is undefined");
    }
  }

  public void init() {
  }

  private CommandProcessorResponse setVariable(String varname, String varvalue){
    SessionState ss = SessionState.get();
    if (varvalue.contains("\n")){
      ss.err.println("Warning: Value had a \\n character in it.");
    }
    if (varname.startsWith(SetProcessor.ENV_PREFIX)){
      ss.err.println("env:* variables can not be set.");
      return new CommandProcessorResponse(1);
    } else if (varname.startsWith(SetProcessor.SYSTEM_PREFIX)){
      String propName = varname.substring(SetProcessor.SYSTEM_PREFIX.length());
      System.getProperties().setProperty(propName, new VariableSubstitution().substitute(ss.getConf(),varvalue));
      return new CommandProcessorResponse(0);
    } else if (varname.startsWith(SetProcessor.HIVECONF_PREFIX)){
      String propName = varname.substring(SetProcessor.HIVECONF_PREFIX.length());
      String error = setConf(varname, propName, varvalue, false);
      return new CommandProcessorResponse(error == null ? 0 : 1, error, null);
    } else if (varname.startsWith(SetProcessor.HIVEVAR_PREFIX)) {
      String propName = varname.substring(SetProcessor.HIVEVAR_PREFIX.length());
      ss.getHiveVariables().put(propName, new VariableSubstitution().substitute(ss.getConf(),varvalue));
      return new CommandProcessorResponse(0);
    } else {
      String error = setConf(varname, varname, varvalue, true);
      return new CommandProcessorResponse(error == null ? 0 : 1, error, null);
    }
  }
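
Seen from the caller, these prefixes map directly onto SET commands. A hedged sketch of exercising them through SetProcessor's public CommandProcessor entry point (assumes a started SessionState; some Hive versions also declare a checked CommandNeedRetryException on run()):

    SetProcessor sp = new SetProcessor();
    sp.run("hivevar:region=us-west");   // lands in ss.getHiveVariables()
    sp.run("system:my.prop=1");         // forwarded to System properties
    sp.run("mapred.reduce.tasks=10");   // no prefix: goes through setConf(...)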
