Class org.apache.hadoop.hive.ql.session.SessionState (package org.apache.hadoop.hive.ql.session)

Examples of org.apache.hadoop.hive.ql.session.SessionState.LogHelper
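
The snippets below come from different Hive components, but they all follow the same pattern: obtain a commons-logging Log (and, where relevant, the session's silent flag), wrap it in a SessionState.LogHelper, and route user-facing output through printInfo/printError. The following is a minimal sketch of that pattern; the class name ExampleTask and the message strings are illustrative only and not part of Hive.

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;

// Illustrative driver class; the name ExampleTask is not part of Hive.
public class ExampleTask {

  private static final Log LOG = LogFactory.getLog(ExampleTask.class.getName());

  public static void main(String[] args) {
    // Reuse the active session's configuration when one exists,
    // otherwise fall back to a plain Configuration (same fallback as CliDriver below).
    SessionState ss = SessionState.get();
    Configuration conf = (ss != null) ? ss.getConf() : new Configuration();

    // Honor the session's silent setting so informational console output can be suppressed.
    boolean isSilent = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESESSIONSILENT);
    LogHelper console = new LogHelper(LOG, isSilent);

    // Info messages go to the console (unless silent) and to the log; errors are always shown.
    console.printInfo("Starting example task");
    console.printError("WARNING: this is only a usage sketch, not a real Hive task");
  }
}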


      // Fetch the type of each child expression before comparing them.
      TypeInfo oiTypeInfo0 = children.get(0).getTypeInfo();
      TypeInfo oiTypeInfo1 = children.get(1).getTypeInfo();

      SessionState ss = SessionState.get();
      Configuration conf = (ss != null) ? ss.getConf() : new Configuration();

      LogHelper console = new LogHelper(LOG);

      // In strict mode, comparing a bigint with a string (or with a double, below) is an
      // error; otherwise it only produces a warning about possible loss of precision.
      if ((oiTypeInfo0.equals(TypeInfoFactory.stringTypeInfo) && oiTypeInfo1.equals(TypeInfoFactory.longTypeInfo)) ||
          (oiTypeInfo0.equals(TypeInfoFactory.longTypeInfo) && oiTypeInfo1.equals(TypeInfoFactory.stringTypeInfo))) {
        if (HiveConf.getVar(conf, HiveConf.ConfVars.HIVEMAPREDMODE).equalsIgnoreCase("strict")) {
          throw new UDFArgumentException(ErrorMsg.NO_COMPARE_BIGINT_STRING.getMsg());
        } else {
          console.printError("WARNING: Comparing a bigint and a string may result in a loss of precision.");
        }
      } else if ((oiTypeInfo0.equals(TypeInfoFactory.doubleTypeInfo) && oiTypeInfo1.equals(TypeInfoFactory.longTypeInfo)) ||
          (oiTypeInfo0.equals(TypeInfoFactory.longTypeInfo) && oiTypeInfo1.equals(TypeInfoFactory.doubleTypeInfo))) {
        if (HiveConf.getVar(conf, HiveConf.ConfVars.HIVEMAPREDMODE).equalsIgnoreCase("strict")) {
          throw new UDFArgumentException(ErrorMsg.NO_COMPARE_BIGINT_DOUBLE.getMsg());
        } else {
          console.printError("WARNING: Comparing a bigint and a double may result in a loss of precision.");
        }
      }
    }

    ObjectInspector oi = genericUDF.initializeAndFoldConstants(childrenOIs);


    // Initialization of a semantic analyzer: the console LogHelper wraps the class's own logger.
    try {
      this.conf = conf;
      db = Hive.get(conf);
      rootTasks = new ArrayList<Task<? extends Serializable>>();
      LOG = LogFactory.getLog(this.getClass().getName());
      console = new LogHelper(LOG);
      idToTableNameMap = new HashMap<String, String>();
      inputs = new LinkedHashSet<ReadEntity>();
      outputs = new LinkedHashSet<WriteEntity>();
    } catch (Exception e) {
      throw new SemanticException(e);
    }

    HiveConf hiveConf = new HiveConf(conf, PartialScanTask.class);

    Log LOG = LogFactory.getLog(PartialScanTask.class.getName());
    // Suppress console info output when the session is configured to be silent.
    boolean isSilent = HiveConf.getBoolVar(conf,
        HiveConf.ConfVars.HIVESESSIONSILENT);
    LogHelper console = new LogHelper(LOG, isSilent);

    // print out the location of the log file for the user so
    // that it's easy to find reason for local mode execution failures
    for (Appender appender : Collections
        .list((Enumeration<Appender>) LogManager.getRootLogger()
            .getAllAppenders())) {
      if (appender instanceof FileAppender) {
        console.printInfo("Execution log at: "
            + ((FileAppender) appender).getFile());
      }
    }

    PartialScanWork mergeWork = new PartialScanWork(inputPaths);

  public CliDriver() {
    SessionState ss = SessionState.get();
    // Reuse the session's configuration when a SessionState is active; otherwise use a default Configuration.
    conf = (ss != null) ? ss.getConf() : new Configuration();
    Log LOG = LogFactory.getLog("CliDriver");
    console = new LogHelper(LOG);
  }

      LOG.error(StringUtils.stringifyException(e));
      throw new RuntimeException(e);
    }
    this.driverContext = driverContext;

    console = new LogHelper(LOG);
  }

    HiveConf hiveConf = new HiveConf(conf, BlockMergeTask.class);

    Log LOG = LogFactory.getLog(BlockMergeTask.class.getName());
    boolean isSilent = HiveConf.getBoolVar(conf,
        HiveConf.ConfVars.HIVESESSIONSILENT);
    LogHelper console = new LogHelper(LOG, isSilent);

    // print out the location of the log file for the user so
    // that it's easy to find reason for local mode execution failures
    for (Appender appender : Collections
        .list((Enumeration<Appender>) LogManager.getRootLogger()
            .getAllAppenders())) {
      if (appender instanceof FileAppender) {
        console.printInfo("Execution log at: "
            + ((FileAppender) appender).getFile());
      }
    }

    MergeWork mergeWork = new MergeWork(inputPaths, outputDir);

  }

  public MapredLocalTask(MapredLocalWork plan, JobConf job, boolean isSilent) throws HiveException {
    setWork(plan);
    this.job = job;
    console = new LogHelper(LOG, isSilent);
  }

  /**
   * Constructor when invoked from QL.
   */
  public ExecDriver() {
    super();
    console = new LogHelper(LOG);
    this.jobExecHelper = new HadoopJobExecHelper(job, console, this, this);
  }

  /**
   * Constructor/Initialization for invocation as independent utility.
   */
  public ExecDriver(MapredWork plan, JobConf job, boolean isSilent) throws HiveException {
    setWork(plan);
    this.job = job;
    console = new LogHelper(LOG, isSilent);
    this.jobExecHelper = new HadoopJobExecHelper(job, console, this, this);
  }

    } else {
      setupChildLog4j(conf);
    }

    Log LOG = LogFactory.getLog(ExecDriver.class.getName());
    LogHelper console = new LogHelper(LOG, isSilent);

    if (planFileName == null) {
      console.printError("Must specify Plan File Name");
      printUsage();
    }

    // print out the location of the log file for the user so
    // that it's easy to find reason for local mode execution failures
    for (Appender appender : Collections.list((Enumeration<Appender>) LogManager.getRootLogger()
        .getAllAppenders())) {
      if (appender instanceof FileAppender) {
        console.printInfo("Execution log at: " + ((FileAppender) appender).getFile());
      }
    }

    // the plan file should always be in local directory
    Path p = new Path(planFileName);
