Package org.apache.hadoop.hive.ql.session

Examples of org.apache.hadoop.hive.ql.session.SessionState

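SessionState is Hive's per-thread session handle: code creates one from a HiveConf, installs it with SessionState.start(), and any later code on the same thread retrieves it with SessionState.get(). The fragments below all follow that pattern. As a minimal, self-contained sketch (the class name here is illustrative, and it assumes the older Hive API shown on this page, with a public SessionState(HiveConf) constructor):

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.session.SessionState;

public class SessionStateLifecycle {
  public static void main(String[] args) {
    // Build a configuration and bind a new session to the current thread.
    HiveConf conf = new HiveConf(SessionState.class);
    SessionState ss = new SessionState(conf);
    SessionState.start(ss);

    // Anywhere later on the same thread, the session is reachable statically.
    SessionState current = SessionState.get();
    System.out.println("session conf == original conf: "
        + (current.getConf() == conf));
  }
}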

    int end = cmd.indexOf(" ");
    return (end == -1) ? "" : cmd.substring(end, cmd.length());
  }

  private String fetchFilesNotInLocalFilesystem(String cmd) {
    SessionState ss = SessionState.get();
    String progName = getScriptProgName(cmd);

    if (progName.matches("("+ SessionState.getMatchingSchemaAsRegex() +")://.*")) {
      String filePath = ss.add_resource(ResourceType.FILE, progName, true);
      if (filePath == null) {
        throw new RuntimeException("Could not download the resource: " + progName);
      }
      Path p = new Path(filePath);
      String fileName = p.getName();

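The fragment above registers a script that lives behind a supported URI scheme as a session resource, which downloads it and yields a local path. A reduced sketch of that step, reusing the add_resource(ResourceType, String, boolean) overload shown above (the helper name is this page's own, and the overload is assumed from the fragment rather than from a current Hive release):

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.session.SessionState.ResourceType;

public class ResourceDownloadSketch {
  /** Downloads progName into the session's resource area and returns its local file name. */
  public static String localNameFor(String progName) {
    SessionState ss = SessionState.get();
    if (ss == null) {
      // No session on this thread - nothing to download into.
      return progName;
    }
    // add_resource fetches the file and hands back the local path (null on failure).
    String filePath = ss.add_resource(ResourceType.FILE, progName, true);
    if (filePath == null) {
      throw new RuntimeException("Could not download the resource: " + progName);
    }
    return new Path(filePath).getName();
  }
}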

  /**
   * If the user's script command needs any modifications, apply them here.
   */
  private String getFixedCmd(String cmd) {
    SessionState ss = SessionState.get();
    if(ss == null)
      return cmd;

    // for local mode - replace any references to packaged files by name with
    // the reference to the original file path
    if(ss.getConf().get("mapred.job.tracker", "local").equals("local")) {
      Set<String> files = ss.list_resource(SessionState.ResourceType.FILE, null);
      if((files != null) && !files.isEmpty()) {
        int end = cmd.indexOf(" ");
        String prog = (end == -1) ? cmd : cmd.substring(0, end);
        String args = (end == -1) ? "" :  cmd.substring(end, cmd.length());


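getFixedCmd() only rewrites the command when the job runs against the local job tracker, in which case packaged file names must be mapped back to the paths registered on the session. A small sketch of the two checks it relies on, assuming the list_resource(ResourceType, List) call exactly as it appears above:

import java.util.Set;
import org.apache.hadoop.hive.ql.session.SessionState;

public class LocalModeCheckSketch {
  /** True when the current session targets the local job tracker. */
  public static boolean isLocalMode(SessionState ss) {
    return ss != null
        && ss.getConf().get("mapred.job.tracker", "local").equals("local");
  }

  /** Names of all FILE resources on the current session, or null when there is no session. */
  public static Set<String> sessionFiles() {
    SessionState ss = SessionState.get();
    return (ss == null)
        ? null
        : ss.list_resource(SessionState.ResourceType.FILE, null);
  }
}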
    super();
  }

  public static String getRealFiles(Configuration conf) {
    // fill in local files to be added to the task environment
    SessionState ss = SessionState.get();
    Set<String> files = (ss == null) ? null : ss.list_resource(
        SessionState.ResourceType.FILE, null);
    if (files != null) {
      ArrayList<String> realFiles = new ArrayList<String>(files.size());
      for (String one : files) {
        try {

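The loop body of getRealFiles() is cut off above; per its comment, the method turns the session's FILE resources into a single value for the task environment. A simplified sketch of that shape follows. It merely joins the registered names with commas and does not reproduce the per-file resolution the elided code presumably performs, so treat it as an approximation:

import java.util.ArrayList;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.session.SessionState;

public class RealFilesSketch {
  /** Comma-separated list of the session's FILE resources; empty string when there is no session. */
  public static String getRealFiles(Configuration conf) {
    SessionState ss = SessionState.get();
    Set<String> files = (ss == null)
        ? null
        : ss.list_resource(SessionState.ResourceType.FILE, null);
    if (files == null || files.isEmpty()) {
      return "";
    }
    ArrayList<String> realFiles = new ArrayList<String>(files.size());
    for (String one : files) {
      // The original code resolves each name (using conf) before adding it;
      // this sketch keeps the names as registered.
      realFiles.add(one);
    }
    return String.join(",", realFiles);
  }
}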
      report = " map = " + Math.round(rj.mapProgress() * 100) + "%, reduce = "
          + Math.round(rj.reduceProgress() * 100) + "%";

      if (!report.equals(lastReport)) {

        SessionState ss = SessionState.get();
        if (ss != null) {
          ss.getHiveHistory().setTaskCounters(
              SessionState.get().getQueryId(), getId(), rj);
          ss.getHiveHistory().setTaskProperty(
              SessionState.get().getQueryId(), getId(),
              Keys.TASK_HADOOP_PROGRESS, report);
          ss.getHiveHistory().progressTask(
              SessionState.get().getQueryId(), this);
        }
        console.printInfo(report);
        lastReport = report;
      }

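The fragment above shows the usual guard around HiveHistory: progress is only recorded when a session, and therefore a history file, exists. A reduced sketch of that guard, reusing the setTaskProperty()/progressTask() calls shown above and assuming the caller has already formatted the report string (counters are omitted because they need a live RunningJob):

import java.io.Serializable;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
import org.apache.hadoop.hive.ql.session.SessionState;

public class ProgressReportSketch {
  /** Records a progress line for the given task if a session with history is active. */
  public static void recordProgress(Task<? extends Serializable> task, String report) {
    SessionState ss = SessionState.get();
    if (ss == null || ss.getHiveHistory() == null) {
      return; // no session (e.g. a test) - nothing to record
    }
    String queryId = ss.getQueryId();
    ss.getHiveHistory().setTaskProperty(queryId, task.getId(),
        Keys.TASK_HADOOP_PROGRESS, report);
    ss.getHiveHistory().progressTask(queryId, task);
  }
}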
  public void initialize (HiveConf conf) {
    isdone = false;
    this.conf = conf;
   
    SessionState ss = SessionState.get();
    try {
      if (ss == null) {
        // test case - no session setup perhaps
        db = Hive.get(conf);
      } else {
        // normal case - session has handle to db
        db = ss.getDb();
      }
    } catch (HiveException e) {
      // Bail out ungracefully - we should never hit
      // this here - but would have hit it in SemanticAnalyzer
      LOG.error(StringUtils.stringifyException(e));

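initialize() illustrates a common fallback: prefer the database handle cached on the session, and only go through Hive.get(conf) when no session was started, as happens in tests. A minimal sketch of that decision, assuming SessionState.getDb() behaves as it is used above:

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.session.SessionState;

public class DbHandleSketch {
  /** Returns the session's Hive handle, or a fresh one when no session exists. */
  public static Hive dbFor(HiveConf conf) throws HiveException {
    SessionState ss = SessionState.get();
    if (ss == null) {
      // test case - no session setup, build a handle directly from the conf
      return Hive.get(conf);
    }
    // normal case - the session already holds a handle to the db
    return ss.getDb();
  }
}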
    /**
     * Creates a handler backed by a fresh SessionState bound to the current thread.
     */
    public HiveServerHandler() throws MetaException {
      super(HiveServer.class.getName());
      session = new SessionState(new HiveConf(SessionState.class));
      SessionState.start(session);
      HiveConf conf = session.getConf();
      session.in = null;
      session.out = null;
      session.err = null;

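HiveServerHandler starts its own SessionState and deliberately nulls the console streams, since a Thrift server has no terminal attached. An interactive caller would instead point the same fields at the process streams; the sketch below shows that variant, assuming in is a java.io.InputStream and out/err are java.io.PrintStream fields (an assumption about this Hive version, not something visible in the fragment):

import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.session.SessionState;

public class InteractiveSessionSketch {
  public static SessionState startInteractive() throws UnsupportedEncodingException {
    SessionState ss = new SessionState(new HiveConf(SessionState.class));
    SessionState.start(ss);
    // Attach the process streams instead of nulling them as the server handler does.
    ss.in = System.in;
    ss.out = new PrintStream(System.out, true, "UTF-8");
    ss.err = new PrintStream(System.err, true, "UTF-8");
    return ss;
  }
}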
  /**
   * Called at the start of a query, from the beginning of Driver.run().
   */
  public void startQuery(String cmd, String id) {
    SessionState ss = SessionState.get();
    if (ss == null)
      return;
    QueryInfo ji = new QueryInfo();

    ji.hm.put(Keys.QUERY_ID.name(), id);

   *
   * @param queryId the id of the query this task belongs to
   * @param task the task being started
   * @param taskName the name recorded for the task in the history log
   */
  public void startTask(String queryId, Task<? extends Serializable> task,
      String taskName) {
    SessionState ss = SessionState.get();
    if (ss == null)
      return;
    TaskInfo ti = new TaskInfo();

    ti.hm.put(Keys.QUERY_ID.name(), ss.getQueryId());
    ti.hm.put(Keys.TASK_ID.name(), task.getId());
    ti.hm.put(Keys.TASK_NAME.name(), taskName);

    String id = queryId + ":" + task.getId();
    taskInfoMap.put(id, ti);

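startQuery() and startTask() above are the recording side of the history log; callers reach them through the session. A sketch of the calling side, assuming both methods live on the object returned by getHiveHistory(), which is how the earlier progress fragment reaches its sibling calls, and using the task's class name as the task name purely for illustration:

import java.io.Serializable;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.session.SessionState;

public class HistoryCallerSketch {
  /** Registers a query and one of its tasks with the session's history log, if any. */
  public static void record(String cmd, String queryId, Task<? extends Serializable> task) {
    SessionState ss = SessionState.get();
    if (ss == null || ss.getHiveHistory() == null) {
      return; // no session - history is not being kept
    }
    ss.getHiveHistory().startQuery(cmd, queryId);
    ss.getHiveHistory().startTask(queryId, task, task.getClass().getName());
  }
}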
    }
  }

  private static int processCmd(String cmd) {

    SessionState ss = SessionState.get();
    long start = System.currentTimeMillis();

    cmd = cmd.trim();
    String firstToken = cmd.split("\\s+")[0].trim();

    if (firstToken.equalsIgnoreCase("set")) {
      return new SetProcessor().run(cmd.substring(firstToken.length()).trim()).getResponseCode();
    } else if (firstToken.equalsIgnoreCase("dfs")) {
      return new DfsProcessor(ss.getConf()).run(cmd.substring(firstToken.length()).trim()).getResponseCode();
    }

    HCatDriver driver = new HCatDriver();

    int ret = driver.run(cmd).getResponseCode();

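Note that processCmd() above calls ss.getConf() for the dfs branch without checking for a missing session, unlike most other fragments on this page. A small defensive variant is sketched below; the fallback to a fresh HiveConf is this page's assumption, not something the original code does:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.session.SessionState;

public class ConfLookupSketch {
  /** Configuration for processors such as DfsProcessor: the session's conf when present, else a fresh HiveConf. */
  public static Configuration processorConf() {
    SessionState ss = SessionState.get();
    return (ss != null) ? ss.getConf() : new HiveConf(SessionState.class);
  }
}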
    {
        try {
            for (String part : parts) {
                // Partition names are URL encoded. We decode the names unless Hive
                // is configured to use the encoded names.
                SessionState ss = SessionState.get();
                if (ss != null && ss.getConf() != null &&
                      !ss.getConf().getBoolVar(HiveConf.ConfVars.HIVE_DECODE_PARTITION_NAME)) {
                    outStream.writeBytes(part);
                } else {
                    outStream.writeBytes(FileUtils.unescapePathName(part));
                }
                outStream.write(terminator);

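The last fragment decides, based on the HIVE_DECODE_PARTITION_NAME setting, whether partition names are written in their escaped or decoded form. The same decision isolated into a helper, reusing only the calls shown above:

import org.apache.hadoop.hive.common.FileUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.session.SessionState;

public class PartNameSketch {
  /** Returns the partition name as it should be displayed: decoded by default, raw when decoding is disabled. */
  public static String displayName(String part) {
    SessionState ss = SessionState.get();
    if (ss != null && ss.getConf() != null
        && !ss.getConf().getBoolVar(HiveConf.ConfVars.HIVE_DECODE_PARTITION_NAME)) {
      return part; // configured to keep the URL-encoded form
    }
    return FileUtils.unescapePathName(part);
  }
}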