Package org.apache.hadoop.hive.ql.processors

Examples of org.apache.hadoop.hive.ql.processors.CommandProcessor
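Before the individual snippets, here is a minimal sketch of the dispatch pattern they all share: trim the command, split it on whitespace, let the first token select a CommandProcessor from CommandProcessorFactory, then hand a Driver the full statement while every other processor receives only the remainder (the cmd_1 variable in the snippets). This is a hedged sketch against the Hive 0.x-era API visible below, not a verified implementation; the class name CommandProcessorSketch and the execute helper are ours, and it assumes a SessionState has already been started for the calling thread.

      import org.apache.hadoop.hive.conf.HiveConf;
      import org.apache.hadoop.hive.ql.Driver;
      import org.apache.hadoop.hive.ql.processors.CommandProcessor;
      import org.apache.hadoop.hive.ql.processors.CommandProcessorFactory;
      import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;

      public class CommandProcessorSketch {
        // Returns the processor's response code; -1 if no processor matches.
        public static int execute(String cmd, HiveConf conf) throws Exception {
          String trimmed = cmd.trim();
          String[] tokens = trimmed.split("\\s+");
          // Everything after the first token; empty for one-word commands.
          String rest = trimmed.substring(tokens[0].length()).trim();

          CommandProcessor proc = CommandProcessorFactory.get(tokens[0], conf);
          if (proc == null) {
            return -1; // no processor registered for this command word
          }

          CommandProcessorResponse response;
          if (proc instanceof Driver) {
            // HiveQL statements go to the Driver with the full command text.
            response = ((Driver) proc).run(trimmed);
          } else {
            // Built-in processors (set, dfs, add, delete, ...) get the rest.
            response = proc.run(rest);
          }
          return response.getResponseCode();
        }
      }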


      int ret = 0;
      String errorMessage = "";
      String SQLState = null;

      try {
        CommandProcessor proc = CommandProcessorFactory.get(tokens[0]);
        CommandProcessorResponse response = null;
        if (proc != null) {
          if (proc instanceof Driver) {
            isHiveQuery = true;
            driver = (Driver) proc;
            response = driver.run(cmd);
          } else {
            isHiveQuery = false;
            response = proc.run(cmd_1);
          }

          ret = response.getResponseCode();
          SQLState = response.getSQLState();
          errorMessage = response.getErrorMessage();
View Full Code Here
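The snippet above is cut off before the response fields are used. In HiveServer-style code from this era, a nonzero response code is typically converted into a HiveServerException and thrown back to the Thrift client. A hedged sketch of that follow-up step (HiveServerException is the Thrift-generated exception these examples already use; the exact message wording is our assumption):

      if (ret != 0) {
        // Surface the failure to the Thrift client; message format assumed.
        throw new HiveServerException("Query returned non-zero code: " + ret
            + ", cause: " + errorMessage, ret, SQLState);
      }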


          ss.out.println(StringUtils.join(s, "\n"));
        }
      }

    } else {
      CommandProcessor proc = CommandProcessorFactory.get(tokens[0], (HiveConf)conf);
      if (proc != null) {
        if (proc instanceof Driver) {
          Driver qp = (Driver) proc;
          PrintStream out = ss.out;
          long start = System.currentTimeMillis();

          ret = qp.run(cmd).getResponseCode();
          if (ret != 0) {
            qp.close();
            return ret;
          }

          ArrayList<String> res = new ArrayList<String>();
         
          if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CLI_PRINT_HEADER)) {
            // Print the column names
            boolean first_col = true;
            Schema sc = qp.getSchema();
            for (FieldSchema fs : sc.getFieldSchemas()) {
              if (!first_col) {
                out.print('\t');
              }
              out.print(fs.getName());
              first_col = false;
            }
            out.println();
          }

          try {
            while (qp.getResults(res)) {
              for (String r : res) {
                out.println(r);
              }
              res.clear();
              if (out.checkError()) {
                break;
              }
            }
          } catch (IOException e) {
            console.printError("Failed with exception " + e.getClass().getName() + ":"
                + e.getMessage(), "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
            ret = 1;
          }

          int cret = qp.close();
          if (ret == 0) {
            ret = cret;
          }

          long end = System.currentTimeMillis();
          if (end > start) {
            double timeTaken = (end - start) / 1000.0;
            console.printInfo("Time taken: " + timeTaken + " seconds", null);
          }

        } else {
          ret = proc.run(cmd_1).getResponseCode();
        }
      }
    }

    return ret;
View Full Code Here

          err.println("[Thrift Error]: Hive server is not cleaned due to thrift exception: "
              + errMsg);
        }
      }
    } else { // local mode
      CommandProcessor proc = CommandProcessorFactory.get(tokens[0], (HiveConf) conf);
      ret = processLocalCmd(cmd, proc, ss);
    }

    return ret;
  }
View Full Code Here

          err.println("[Thrift Error]: Hive server is not cleaned due to thrift exception: "
              + errMsg);
        }
      }
    } else { // local mode
      CommandProcessor proc = CommandProcessorFactory.get(tokens[0], (HiveConf)conf);
      int tryCount = 0;
      boolean needRetry;

      do {
        try {
          needRetry = false;
          if (proc != null) {
            if (proc instanceof Driver) {
              Driver qp = (Driver) proc;
              PrintStream out = ss.out;
              long start = System.currentTimeMillis();
              if (ss.getIsVerbose()) {
                out.println(cmd);
              }

              qp.setTryCount(tryCount);
              ret = qp.run(cmd).getResponseCode();
              if (ret != 0) {
                qp.close();
                return ret;
              }

              ArrayList<String> res = new ArrayList<String>();

              if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CLI_PRINT_HEADER)) {
                // Print the column names
                boolean first_col = true;
                Schema sc = qp.getSchema();
                for (FieldSchema fs : sc.getFieldSchemas()) {
                  if (!first_col) {
                    out.print('\t');
                  }
                  out.print(fs.getName());
                  first_col = false;
                }
                out.println();
              }

              try {
                while (qp.getResults(res)) {
                  for (String r : res) {
                    out.println(r);
                  }
                  res.clear();
                  if (out.checkError()) {
                    break;
                  }
                }
              } catch (IOException e) {
                console.printError("Failed with exception " + e.getClass().getName() + ":"
                    + e.getMessage(), "\n"
                    + org.apache.hadoop.util.StringUtils.stringifyException(e));
                ret = 1;
              }

              int cret = qp.close();
              if (ret == 0) {
                ret = cret;
              }

              long end = System.currentTimeMillis();
              if (end > start) {
                double timeTaken = (end - start) / 1000.0;
                console.printInfo("Time taken: " + timeTaken + " seconds", null);
              }

            } else {
              if (ss.getIsVerbose()) {
                ss.out.println(tokens[0] + " " + cmd_1);
              }
              ret = proc.run(cmd_1).getResponseCode();
            }
          }
        } catch (CommandNeedRetryException e) {
          console.printInfo("Retry query with a different approach...");
          tryCount++;
View Full Code Here
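The retry snippet above is truncated just after the attempt counter is bumped. As a hedged reconstruction of how such a do/while loop typically closes (variable names mirror the snippet; the needRetry assignment and loop condition are our completion, not the file's verified tail):

      do {
        try {
          needRetry = false;
          qp.setTryCount(tryCount);
          ret = qp.run(cmd).getResponseCode();
          // ... fetch and print results as in the snippet ...
        } catch (CommandNeedRetryException e) {
          console.printInfo("Retry query with a different approach...");
          tryCount++;
          needRetry = true; // loop back and re-submit the same command
        }
      } while (needRetry);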

      int ret = 0;
      String errorMessage = "";
      String SQLState = null;

      try {
        CommandProcessor proc = CommandProcessorFactory.get(tokens[0]);
        CommandProcessorResponse response = null;
        if (proc != null) {
          if (proc instanceof Driver) {
            isHiveQuery = true;
            driver = (Driver) proc;
            // In Hive server mode, we are not able to retry in the FetchTask
            // case, when calling fetch queries since execute() has returned.
            // For now, we disable the test attempts.
            driver.setTryCount(Integer.MAX_VALUE);
            response = driver.run(cmd);
          } else {
            isHiveQuery = false;
            driver = null;
            // need to reset output for each non-Hive query
            setupSessionIO(session);
            response = proc.run(cmd_1);
          }

          ret = response.getResponseCode();
          SQLState = response.getSQLState();
          errorMessage = response.getErrorMessage();
View Full Code Here

      String cmd = queries.get(i);
      String cmd_trimmed = cmd.trim();
      String[] tokens = cmd_trimmed.split("\\s+");
      String cmd_1 = cmd_trimmed.substring(tokens[0].length()).trim();

      CommandProcessor proc = CommandProcessorFactory.get(tokens[0]);
      if (proc != null) {
        if (proc instanceof Driver) {
          Driver qp = (Driver) proc;
          qp.setTryCount(Integer.MAX_VALUE);
          try {
            queryRet.add(Integer.valueOf(qp.run(cmd).getResponseCode()));
            ArrayList<String> res = new ArrayList<String>();
            try {
              while (qp.getResults(res)) {
                ArrayList<String> resCopy = new ArrayList<String>();
                resCopy.addAll(res);
                resultBucket.add(resCopy);
                if (resultBucket.size() > resultBucketMaxSize) {
                  resultBucket.remove(0);
                }
                for (String row : res) {
                  if (ss != null) {
                    if (ss.out != null) {
                      ss.out.println(row);
                    }
                  } else {
                    throw new RuntimeException("ss was null");
                  }
                }
                res.clear();
              }
            } catch (IOException ex) {
              l4j.error(getSessionName() + " getting results " + getResultFile()
                  + " caused exception.", ex);
            }
          } catch (CommandNeedRetryException e) {
            // this should never happen since we call Driver.setTryCount(Integer.MAX_VALUE)
            l4j.error(getSessionName() + " Exception when executing", e);
          } finally {
            qp.close();
          }
        } else {
          try {
            queryRet.add(Integer.valueOf(proc.run(cmd_1).getResponseCode()));
          } catch (CommandNeedRetryException e) {
            // this should never happen if there is no bug
            l4j.error(getSessionName() + " Exception when executing", e);
          }
        }
View Full Code Here

      int ret = 0;
      String errorMessage = "";
      String SQLState = null;

      try {
        CommandProcessor proc = CommandProcessorFactory.get(tokens[0]);
        if(proc != null) {
          if (proc instanceof Driver) {
            isHiveQuery = true;
            Driver.DriverResponse response = driver.runCommand(cmd);
            ret = response.getResponseCode();
            SQLState = response.getSQLState();
            errorMessage = response.getErrorMessage();
          } else {
            isHiveQuery = false;
            ret = proc.run(cmd_1);
          }
        }
      } catch (Exception e) {
        HiveServerException ex = new HiveServerException();
        ex.setMessage("Error running query: " + e.toString());
View Full Code Here
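Note the older API surface in this snippet: CommandProcessor.run returns a bare int (hence ret = proc.run(cmd_1);) and the Driver exposes runCommand returning a Driver.DriverResponse, whereas the earlier snippets use the later shape in which run returns a CommandProcessorResponse and may throw CommandNeedRetryException. Which shape appears depends on the Hive release each example was scraped from.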

      String cmd = queries.get(i);
      String cmd_trimmed = cmd.trim();
      String[] tokens = cmd_trimmed.split("\\s+");
      String cmd_1 = cmd_trimmed.substring(tokens[0].length()).trim();

      CommandProcessor proc = CommandProcessorFactory.get(tokens[0]);
      if (proc != null) {
        if (proc instanceof Driver) {
          Driver qp = (Driver) proc;
          queryRet.add(new Integer(qp.run(cmd)));
          Vector<String> res = new Vector<String>();
          try {
            while (qp.getResults(res)) {
              resultBucket.add(res);
              if (resultBucket.size() > resultBucketMaxSize) {
                resultBucket.remove(0);
              }
              for (String row : res) {
                if (ss != null) {
                  if (ss.out != null) {
                    ss.out.println(row);
                  }
                } else {
                  throw new RuntimeException("ss was null");
                }
              }
              // res.clear();
            }
          } catch (IOException ex) {
            l4j.error(this.getSessionName() + " getting results "
                + this.getResultFile() + " caused exception.", ex);
          }
          qp.close();
        } else {
          queryRet.add(new Integer(proc.run(cmd_1)));
        }
      } else {
        // processor was null
        l4j.error(this.getSessionName()
            + " query processor was not found for query " + cmd);
View Full Code Here

          err.println("[Thrift Error]: Hive server is not cleaned due to thrift exception: "
              + errMsg);
        }
      }
    } else { // local mode
      CommandProcessor proc = CommandProcessorFactory.get(tokens[0], (HiveConf) conf);
      ret = processLocalCmd(cmd, proc, ss);
    }

    return ret;
  }
View Full Code Here

          ss.out.println(StringUtils.join(s, "\n"));
        }
      }

    } else {
      CommandProcessor proc = CommandProcessorFactory.get(tokens[0]);
      if (proc != null) {
        if (proc instanceof Driver) {
          Driver qp = (Driver) proc;
          PrintStream out = ss.out;
          long start = System.currentTimeMillis();

          ret = qp.run(cmd).getResponseCode();
          if (ret != 0) {
            qp.close();
            return ret;
          }

          ArrayList<String> res = new ArrayList<String>();
          try {
            while (qp.getResults(res)) {
              for (String r : res) {
                out.println(r);
              }
              res.clear();
              if (out.checkError()) {
                break;
              }
            }
          } catch (IOException e) {
            console.printError("Failed with exception " + e.getClass().getName() + ":"
                + e.getMessage(), "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
            ret = 1;
          }

          int cret = qp.close();
          if (ret == 0) {
            ret = cret;
          }

          long end = System.currentTimeMillis();
          if (end > start) {
            double timeTaken = (end - start) / 1000.0;
            console.printInfo("Time taken: " + timeTaken + " seconds", null);
          }

        } else {
          ret = proc.run(cmd_1).getResponseCode();
        }
      }
    }

    return ret;
View Full Code Here
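Finally, a hedged end-to-end sketch under the same 0.x-era API assumptions: start a session, fetch the processor for a non-HiveQL command word, and run it with the remainder of the command line, exactly as the snippets do with cmd_1. The class name SetCommandExample is ours.

      import org.apache.hadoop.hive.conf.HiveConf;
      import org.apache.hadoop.hive.ql.processors.CommandProcessor;
      import org.apache.hadoop.hive.ql.processors.CommandProcessorFactory;
      import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
      import org.apache.hadoop.hive.ql.session.SessionState;

      public class SetCommandExample {
        public static void main(String[] args) throws Exception {
          HiveConf conf = new HiveConf();
          SessionState.start(new SessionState(conf)); // processors need a session
          // "set" selects the SetProcessor; it receives only the text after
          // the command word, like cmd_1 in the snippets above.
          CommandProcessor proc = CommandProcessorFactory.get("set", conf);
          if (proc != null) {
            CommandProcessorResponse resp = proc.run("hive.cli.print.header=true");
            System.out.println("response code: " + resp.getResponseCode());
          }
        }
      }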
