Package org.apache.hadoop.hive.ql.processors

Examples of org.apache.hadoop.hive.ql.processors.CommandProcessor
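
Every excerpt below follows the same dispatch pattern: trim the command, split off the first whitespace-delimited token, ask CommandProcessorFactory for a matching processor, and then either hand the full statement to a Driver (for SQL) or pass only the text after the keyword to a simpler processor such as SET, ADD, or DFS. Here is a minimal, self-contained sketch of that pattern, assuming a Hive 0.13-era classpath; the class name CommandDispatchSketch, the execute method, and its parameters are illustrative and not taken from any excerpt:

import java.util.ArrayList;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.processors.CommandProcessor;
import org.apache.hadoop.hive.ql.processors.CommandProcessorFactory;
import org.apache.hadoop.hive.ql.session.SessionState;

public class CommandDispatchSketch {

  // Runs one command, prints any result rows, and returns the response code.
  public static int execute(String cmd, HiveConf conf) throws Exception {
    SessionState.start(new SessionState(conf));
    String trimmed = cmd.trim();
    String[] tokens = trimmed.split("\\s+");
    // Non-Driver processors expect only the arguments, not the keyword itself.
    String args = trimmed.substring(tokens[0].length()).trim();

    CommandProcessor proc = CommandProcessorFactory.get(tokens, conf);
    if (proc instanceof Driver) {
      Driver driver = (Driver) proc;
      int rc = driver.run(cmd).getResponseCode(); // the Driver takes the full statement
      ArrayList<String> rows = new ArrayList<String>();
      try {
        while (driver.getResults(rows)) { // rows arrive one fetch batch at a time
          for (String row : rows) {
            System.out.println(row);
          }
          rows.clear(); // getResults appends, so clear between batches
        }
      } finally {
        driver.close();
      }
      return rc;
    }
    return proc.run(args).getResponseCode(); // e.g. the SET, ADD, or DFS processors
  }
}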


      String cmd = queries.get(i);
      String cmd_trimmed = cmd.trim();
      String[] tokens = cmd_trimmed.split("\\s+");
      String cmd_1 = cmd_trimmed.substring(tokens[0].length()).trim();

      CommandProcessor proc = CommandProcessorFactory.get(tokens[0]);
      if (proc != null) {
        if (proc instanceof Driver) {
          Driver qp = (Driver) proc;
          queryRet.add(Integer.valueOf(qp.run(cmd).getResponseCode()));
          ArrayList<String> res = new ArrayList<String>();
          try {
            while (qp.getResults(res)) {
              ArrayList<String> resCopy = new ArrayList<String>();
              resCopy.addAll(res);
              resultBucket.add(resCopy);
              if (resultBucket.size() > resultBucketMaxSize) {
                resultBucket.remove(0);
              }
              for (String row : res) {
                if (ss != null) {
                  if (ss.out != null) {
                    ss.out.println(row);
                  }
                } else {
                  throw new RuntimeException("ss was null");
                }
              }
              res.clear();
            }
          } catch (IOException ex) {
            l4j.error(getSessionName() + " getting results " + getResultFile()
                + " caused exception.", ex);
          }
          qp.close();
        } else {
          queryRet.add(Integer.valueOf(proc.run(cmd_1).getResponseCode()));
        }
      } else {
        // processor was null
        l4j.error(getSessionName()
            + " query processor was not found for query " + cmd);


        HiveConf conf = ss.getConf();
        SessionState.start(ss);
        String cmd_trimmed = cmd.trim();
        String[] tokens = cmd_trimmed.split("\\s+");
        String cmd_1 = cmd_trimmed.substring(tokens[0].length()).trim();
        CommandProcessor proc = null;
        try {
            proc = CommandProcessorFactory.get(tokens[0], conf);
        } catch (NoSuchMethodError e){
            try {
                Class<?> c = Class.forName("org.apache.hadoop.hive.ql.processors.CommandProcessorFactory");
                Method m = c.getMethod("get", new Class<?>[] { String.class });
                proc = (CommandProcessor) m.invoke(null, new Object[] { tokens[0] });
            } catch (Exception e1) {
                e1.printStackTrace();
            }
        }

        if (proc instanceof Driver) {
            Driver driver = (Driver) proc;
            int res = driver.compile(cmd); // compile (without running) so getSchema() can describe the result columns; the return code is not checked here
            Schema sch = driver.getSchema();
            List <FieldSchema> fields = sch.getFieldSchemas();
            return fields;
        } else {
            ret = proc.run(cmd_1).getResponseCode();
        }
        return null; // non-Driver commands produce no result schema
    }
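
The NoSuchMethodError fallback in this excerpt (and the next one) is a binary-compatibility shim: the code is compiled against the two-argument CommandProcessorFactory.get(String, HiveConf), and when an older Hive jar that only provides the one-argument get(String) is on the classpath, the lookup is retried through reflection.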

        String cmd_trimmed = cmd.trim();
        String[] tokens = cmd_trimmed.split("\\s+");
        String cmd_1 = cmd_trimmed.substring(tokens[0].length()).trim();
        // this is changed in trunk
        CommandProcessor proc = null;
        try {
            proc = CommandProcessorFactory.get(tokens[0], (HiveConf) conf);
        } catch (NoSuchMethodError e){
            try {
                Class<?> c = Class.forName("org.apache.hadoop.hive.ql.processors.CommandProcessorFactory");
                Method m = c.getMethod("get", new Class<?>[] { String.class });
                proc = (CommandProcessor) m.invoke(null, new Object[] { tokens[0] });
            } catch (Exception e1) {
                //many things could go wrong here
                e1.printStackTrace();
            }
        }
        if (proc instanceof Driver) {
            ret = proc.run(cmd).getResponseCode();
        } else {
            ret = proc.run(cmd_1).getResponseCode();
        }

        return ret;
    }

            stringifyException(e));
        ret = 1;
      }
    } else { // local mode
      try {
        CommandProcessor proc = CommandProcessorFactory.get(tokens, (HiveConf) conf);
        ret = processLocalCmd(cmd, proc, ss);
      } catch (SQLException e) {
        console.printError("Failed processing command " + tokens[0] + " " + e.getLocalizedMessage(),
          org.apache.hadoop.util.StringUtils.stringifyException(e));
        ret = 1;

  public int doHiveCommand(String cmd, Configuration h2conf) {
    int ret = 40; // arbitrary non-zero default, returned if the processor never runs
    String cmd_trimmed = cmd.trim();
    String[] tokens = cmd_trimmed.split("\\s+");
    String cmd_1 = cmd_trimmed.substring(tokens[0].length()).trim();
    CommandProcessor proc = null;

    try {
      proc = CommandProcessorFactory.get(tokens, c); // 'c' is presumably a HiveConf field of the enclosing class; the h2conf parameter is unused here
    } catch (SQLException e) {
      throw new RuntimeException(e);
    }

    ArrayList<String> out = Lists.newArrayList();

    if (proc instanceof Driver) {
      try {
        ret = proc.run(cmd).getResponseCode();
        ((Driver) proc).getResults(out);
      } catch (CommandNeedRetryException ex) {
        Logger.getLogger(HiveTestEmbedded.class.getName()).log(Level.SEVERE, null, ex);
      } catch (IOException e) {
        e.printStackTrace();
      }
    } else {
      try {
        ret = proc.run(cmd_1).getResponseCode();
      } catch (CommandNeedRetryException ex) {
        Logger.getLogger(HiveTestEmbedded.class.getName()).log(Level.SEVERE, null, ex);
      }
    }
    return ret;
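
Unlike the first excerpt, getResults is called only once here, so out receives at most one fetch batch; draining a large result set requires the while loop shown earlier.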

    ArrayList<String> results = new ArrayList<String>();
    CommandProcessorResponse processorResponse = null;
    String cmd_trimmed = cmd.trim();
    String[] tokens = cmd_trimmed.split("\\s+");
    String cmd_1 = cmd_trimmed.substring(tokens[0].length()).trim();
    CommandProcessor proc = null;
    try {
      proc = CommandProcessorFactory.get(tokens, c);
    } catch (SQLException e) {
      throw new RuntimeException(e);
    }
    if (proc instanceof Driver) {
      try {
        processorResponse = proc.run(cmd);
      } catch (CommandNeedRetryException ex) {
        logger.log(Level.SEVERE, null, ex);
      }
    } else {
      try {
        processorResponse = proc.run(cmd_1);
      } catch (CommandNeedRetryException ex) {
        logger.log(Level.SEVERE, null, ex);
      }
    }
    try {
      if (proc instanceof org.apache.hadoop.hive.ql.Driver) {
        ((Driver) proc).getResults(results);
      } else {
        logger.info(String.format(
          "Processor of class %s is currently not supported for retrieving results", proc.getClass()
        ));
      }
    } catch (IOException e) {
      logger.log(Level.SEVERE, null, e);
    } catch (CommandNeedRetryException e) {
      logger.log(Level.SEVERE, null, e);
    }
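
Result retrieval is gated on the concrete Driver type because the CommandProcessor interface itself exposes no getResults method; only the SQL Driver produces fetchable rows.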

    String cmd = "show tables";
    String cmd_trimmed = cmd.trim();
    String[] tokens = cmd_trimmed.split("\\s+");
    String cmd_1 = cmd_trimmed.substring(tokens[0].length()).trim();

    CommandProcessor proc = CommandProcessorFactory.get(tokens[0]);
    if (proc != null) {
      if (proc instanceof Driver) {
        Driver qp = (Driver) proc;
        qp.setTryCount(Integer.MAX_VALUE);
        try {

  public static ExecuteStatementOperation newExecuteStatementOperation(
      HiveSession parentSession, String statement, Map<String, String> confOverlay, boolean runAsync)
          throws HiveSQLException {
    String[] tokens = statement.trim().split("\\s+");
    CommandProcessor processor = null;
    try {
      processor = CommandProcessorFactory.getForHiveCommand(tokens, parentSession.getHiveConf());
    } catch (SQLException e) {
      throw new HiveSQLException(e.getMessage(), e.getSQLState(), e);
    }
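
getForHiveCommand returns null when the first token is not a recognized Hive command keyword; the caller uses that null to build a Driver-backed SQL operation instead of a command operation, which is why this excerpt needs no instanceof Driver check (that branch falls outside the excerpt).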

    for (int i = 0; i < queries.size(); i++) {
      String cmd = queries.get(i);
      String cmd_trimmed = cmd.trim();
      String[] tokens = cmd_trimmed.split("\\s+");
      String cmd_1 = cmd_trimmed.substring(tokens[0].length()).trim();
      CommandProcessor proc = null;
      try {
        proc = CommandProcessorFactory.get(tokens[0]);
      } catch (SQLException e) {
        l4j.error(getSessionName() + " error processing " + cmd, e);
      }
      if (proc != null) {
        if (proc instanceof Driver) {
          Driver qp = (Driver) proc;
          qp.setTryCount(Integer.MAX_VALUE);
          try {
            queryRet.add(Integer.valueOf(qp.run(cmd).getResponseCode()));
            ArrayList<String> res = new ArrayList<String>();
            try {
              while (qp.getResults(res)) {
                ArrayList<String> resCopy = new ArrayList<String>();
                resCopy.addAll(res);
                resultBucket.add(resCopy);
                if (resultBucket.size() > resultBucketMaxSize) {
                  resultBucket.remove(0);
                }
                for (String row : res) {
                  if (ss != null) {
                    if (ss.out != null) {
                      ss.out.println(row);
                    }
                  } else {
                    throw new RuntimeException("ss was null");
                  }
                }
                res.clear();
              }
            } catch (IOException ex) {
              l4j.error(getSessionName() + " getting results " + getResultFile()
                  + " caused exception.", ex);
            }
          } catch (CommandNeedRetryException e) {
            // this should never happen since we call Driver.setTryCount(Integer.MAX_VALUE)
            l4j.error(getSessionName() + " Exception when executing", e);
          } finally {
            qp.close();
          }
        } else {
          try {
            queryRet.add(Integer.valueOf(proc.run(cmd_1).getResponseCode()));
          } catch (CommandNeedRetryException e) {
            // this should never happen unless there is a bug
            l4j.error(getSessionName() + " Exception when executing", e);
          }
        }
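
This is the hardened version of the first excerpt: setTryCount(Integer.MAX_VALUE) keeps the Driver from signalling CommandNeedRetryException for exceeding its retry budget, and the finally block guarantees qp.close() runs even when the result loop throws.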

          err.println("[Thrift Error]: Hive server is not cleaned due to thrift exception: "
              + errMsg);
        }
      }
    } else { // local mode
      CommandProcessor proc = CommandProcessorFactory.get(tokens[0], (HiveConf) conf);
      ret = processLocalCmd(cmd, proc, ss);
    }

    return ret;
  }
