Package org.apache.hadoop.hive.ql.exec

Examples of org.apache.hadoop.hive.ql.exec.UDFArgumentException
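
UDFArgumentException (and its subclasses UDFArgumentLengthException and UDFArgumentTypeException) is the exception a Hive GenericUDF or GenericUDTF throws from initialize(), and a UDAF resolver can throw as well since it extends SemanticException, when a query passes the wrong number or kind of arguments; Hive surfaces it as a compile-time error for the query. As a quick orientation before the excerpts below, here is a minimal sketch of the usual pattern. The class GenericUDFExampleIsNotNull and its behavior are invented for illustration and are not part of Hive; only the API calls are real.

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

// Hypothetical UDF used only to illustrate the argument checks; not part of Hive.
public class GenericUDFExampleIsNotNull extends GenericUDF {

  @Override
  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    // Wrong arity: reject the call when the query is compiled.
    if (arguments.length != 1) {
      throw new UDFArgumentException("example_isnotnull() takes exactly one argument");
    }
    // Wrong category: report which argument is at fault.
    if (arguments[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
      throw new UDFArgumentTypeException(0, "A primitive argument was expected but "
          + arguments[0].getTypeName() + " was found");
    }
    return PrimitiveObjectInspectorFactory.javaBooleanObjectInspector;
  }

  @Override
  public Object evaluate(DeferredObject[] arguments) throws HiveException {
    return arguments[0].get() != null;
  }

  @Override
  public String getDisplayString(String[] children) {
    return "example_isnotnull(" + children[0] + ")";
  }
}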


  }

  @Override
  public StructObjectInspector initialize(ObjectInspector[] args) throws UDFArgumentException {
    if (args.length != 1) {
      throw new UDFArgumentException("explode() takes only one argument");
    }

    ArrayList<String> fieldNames = new ArrayList<String>();
    ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();

    switch (args[0].getCategory()) {
    case LIST:
      inputOI = args[0];
      fieldNames.add("col");
      fieldOIs.add(((ListObjectInspector)inputOI).getListElementObjectInspector());
      break;
    case MAP:
      inputOI = args[0];
      fieldNames.add("key");
      fieldNames.add("value");
      fieldOIs.add(((MapObjectInspector)inputOI).getMapKeyObjectInspector());
      fieldOIs.add(((MapObjectInspector)inputOI).getMapValueObjectInspector());
      break;
    default:
      throw new UDFArgumentException("explode() takes an array or a map as a parameter");
    }

    return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames,
        fieldOIs);
  }
View Full Code Here
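
The initialize() above only validates the argument and declares the one- or two-column output schema; the row-by-row work happens in process(). The following is a rough sketch of what that side looks like for an explode-style UDTF, reconstructed for context rather than copied from this excerpt. It assumes it sits in the same class, with inputOI being the field assigned above and java.util.List, java.util.Map and the list and map ObjectInspectors imported.

  // Reusable output rows, one slot for arrays and two for map entries.
  private final Object[] forwardListObj = new Object[1];
  private final Object[] forwardMapObj = new Object[2];

  @Override
  public void process(Object[] o) throws HiveException {
    switch (inputOI.getCategory()) {
    case LIST:
      // One output row per array element.
      ListObjectInspector listOI = (ListObjectInspector) inputOI;
      List<?> list = listOI.getList(o[0]);
      if (list == null) {
        return;
      }
      for (Object element : list) {
        forwardListObj[0] = element;
        forward(forwardListObj);
      }
      break;
    case MAP:
      // One output row per (key, value) entry.
      MapObjectInspector mapOI = (MapObjectInspector) inputOI;
      Map<?, ?> map = mapOI.getMap(o[0]);
      if (map == null) {
        return;
      }
      for (Map.Entry<?, ?> entry : map.entrySet()) {
        forwardMapObj[0] = entry.getKey();
        forwardMapObj[1] = entry.getValue();
        forward(forwardMapObj);
      }
      break;
    default:
      // Unreachable: initialize() already rejected anything but LIST or MAP.
      throw new HiveException("explode() can only operate on an array or a map");
    }
  }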


  public ObjectInspector initialize(ObjectInspector[] arguments)
      throws UDFArgumentException {

    // Check if two arguments were passed
    if (arguments.length != ARG_COUNT) {
      throw new UDFArgumentException(
          "The function " + FUNC_NAME + " accepts "
              + ARG_COUNT + " arguments.");
    }

    // Check if ARRAY_IDX argument is of category LIST
    if (!arguments[ARRAY_IDX].getCategory().equals(Category.LIST)) {
      throw new UDFArgumentTypeException(ARRAY_IDX,
          "\"" + org.apache.hadoop.hive.serde.Constants.LIST_TYPE_NAME + "\" "
          + "expected at function ARRAY_CONTAINS, but "
          + "\"" + arguments[ARRAY_IDX].getTypeName() + "\" "
          + "is found");
    }

    arrayOI = (ListObjectInspector) arguments[ARRAY_IDX];
    arrayElementOI = arrayOI.getListElementObjectInspector();

    valueOI = arguments[VALUE_IDX];

    // Check if list element and value are of same type
    if (!ObjectInspectorUtils.compareTypes(arrayElementOI, valueOI)) {
      throw new UDFArgumentTypeException(VALUE_IDX,
          "\"" + arrayElementOI.getTypeName() + "\""
          + " expected at function ARRAY_CONTAINS, but "
          + "\"" + valueOI.getTypeName() + "\""
          + " is found");
    }

    // Check if the comparison is supported for this type
    if (!ObjectInspectorUtils.compareSupported(valueOI)) {
      throw new UDFArgumentException("The function " + FUNC_NAME
          + " does not support comparison for "
          + "\"" + valueOI.getTypeName() + "\""
          + " types");
    }
View Full Code Here
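
The check above leans on a few constants declared elsewhere in the class and not shown on this page; judging from the two-argument contract it enforces, they are presumably along these lines (an inferred sketch, not part of the excerpt):

  // Inferred from the checks above; the exact declarations are not part of this excerpt.
  private static final int ARRAY_IDX = 0;   // position of the array argument
  private static final int VALUE_IDX = 1;   // position of the value to look for
  private static final int ARG_COUNT = 2;   // array_contains() takes two arguments
  private static final String FUNC_NAME = "ARRAY_CONTAINS";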

    TypeInfo[] parameters = paramInfo.getParameters();

    if (parameters.length == 0) {
      if (!paramInfo.isAllColumns()) {
        throw new UDFArgumentException("Argument expected");
      }
      assert !paramInfo.isDistinct() : "DISTINCT not supported with *";
    } else {
      if (parameters.length > 1 && !paramInfo.isDistinct()) {
        throw new UDFArgumentException("DISTINCT keyword must be specified");
      }
      assert !paramInfo.isAllColumns() : "* not supported in expression list";
    }

    return new GenericUDAFCountEvaluator().setCountAllColumns(
View Full Code Here

      // If a bigint is compared to a string or a double, throw an error in strict mode or print a warning otherwise
      if ((oiTypeInfo0.equals(TypeInfoFactory.stringTypeInfo) && oiTypeInfo1.equals(TypeInfoFactory.longTypeInfo)) ||
          (oiTypeInfo0.equals(TypeInfoFactory.longTypeInfo) && oiTypeInfo1.equals(TypeInfoFactory.stringTypeInfo))) {
        if (HiveConf.getVar(conf, HiveConf.ConfVars.HIVEMAPREDMODE).equalsIgnoreCase("strict")) {
          throw new UDFArgumentException(ErrorMsg.NO_COMPARE_BIGINT_STRING.getMsg());
        } else {
          console.printError("WARNING: Comparing a bigint and a string may result in a loss of precision.");
        }
      } else if ((oiTypeInfo0.equals(TypeInfoFactory.doubleTypeInfo) && oiTypeInfo1.equals(TypeInfoFactory.longTypeInfo)) ||
          (oiTypeInfo0.equals(TypeInfoFactory.longTypeInfo) && oiTypeInfo1.equals(TypeInfoFactory.doubleTypeInfo))) {
        if (HiveConf.getVar(conf, HiveConf.ConfVars.HIVEMAPREDMODE).equalsIgnoreCase("strict")) {
          throw new UDFArgumentException(ErrorMsg.NO_COMPARE_BIGINT_DOUBLE.getMsg());
        } else {
          console.printError("WARNING: Comparing a bigint and a double may result in a loss of precision.");
        }
      }
    }
View Full Code Here

    {

      FunctionInfo fi = FunctionRegistry.getFunctionInfo(udfName);
      if (fi == null) {
        throw new UDFArgumentException(udfName + " not found.");
      }

      GenericUDF genericUDF = fi.getGenericUDF();
      if (genericUDF == null) {
        throw new UDFArgumentException(udfName
            + " is an aggregation function or a table function.");
      }

      List<ExprNodeDesc> childrenList = new ArrayList<ExprNodeDesc>(
          children.length);
View Full Code Here

  private ObjectInspector hashOI, prevHashStandardOI;
  private Object prevHash;

  @Override
  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 2) {
      throw new UDFArgumentException("Exactly two arguments are expected.");
    }

    hashOI = arguments[0];
    prevHashStandardOI = ObjectInspectorUtils.getStandardObjectInspector(hashOI,
        ObjectInspectorCopyOption.JAVA);
               
View Full Code Here

  private Object prevHash;
  private ArrayList<Object> queue = new ArrayList<Object>();
  @Override
  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length < 2 || arguments.length > 4) {
      throw new UDFArgumentException("The function accepts between two and four arguments.");
    }

    argumentOIs = arguments;
    prevHashStandardOI = ObjectInspectorUtils.getStandardObjectInspector(arguments[0],
        ObjectInspectorCopyOption.JAVA);
    valueStandardOI = ObjectInspectorUtils.getStandardObjectInspector(arguments[1],
        ObjectInspectorCopyOption.JAVA);
View Full Code Here

  private ObjectInspector hashOI, valueOI, prevHashStandardOI, resultOI;
  private Object prevHash;
  @Override
  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 2) {
      throw new UDFArgumentException("Exactly two argument is expected.");
    }

    for(int i=0;i<arguments.length;i++){
      if (arguments[i].getCategory() != ObjectInspector.Category.PRIMITIVE) {
        throw new UDFArgumentTypeException(i,
View Full Code Here

  private ObjectInspector argumentIOs, prevHashKeyIO;
  private Object prevHashKey;
  @Override
  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 1) {
      throw new UDFArgumentException("The function ROW_NUMBER accepts 1 argument.");
    }
    result.set(1);
    argumentIOs = arguments[0];
    prevHashKeyIO = ObjectInspectorUtils.getStandardObjectInspector(arguments[0],
        ObjectInspectorCopyOption.JAVA);
    return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
View Full Code Here
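
For context, the per-row half of such a ROW_NUMBER-style UDF usually restarts the counter whenever the partition-key argument changes. The evaluate() below is a rough sketch under that assumption; it is not part of the excerpt, and it presumes result is a LongWritable field of the class:

  @Override
  public Object evaluate(DeferredObject[] arguments) throws HiveException {
    Object currentKey = arguments[0].get();
    if (prevHashKey == null
        || ObjectInspectorUtils.compare(prevHashKey, prevHashKeyIO, currentKey, argumentIOs) != 0) {
      // New partition key: restart numbering at 1.
      result.set(1);
    } else {
      // Same key as the previous row: advance the row number.
      result.set(result.get() + 1);
    }
    // Keep a standard-object copy of the key for the next row's comparison.
    prevHashKey = ObjectInspectorUtils.copyToStandardObject(currentKey, argumentIOs,
        ObjectInspectorCopyOption.JAVA);
    return result;
  }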

  @Override
  public StructObjectInspector initialize(ObjectInspector[] args)
      throws UDFArgumentException {

    if (args.length != 1) {
      throw new UDFArgumentException("explode() takes only one argument");
    }

    if (args[0].getCategory() != ObjectInspector.Category.LIST) {
      throw new UDFArgumentException("explode() takes an array as a parameter");
    }
    listOI = (ListObjectInspector) args[0];

    ArrayList<String> fieldNames = new ArrayList<String>();
    ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
View Full Code Here
