Class org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters

Examples of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter
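
The Converter interface (a nested interface of ObjectInspectorConverters) converts an object from the representation described by one ObjectInspector into the representation described by another. A converter is obtained once from ObjectInspectorConverters.getConverter(inputOI, outputOI) and then applied per value with convert(Object). A minimal, self-contained sketch, not taken from the excerpts below (the class name is illustrative):

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;

public class ConverterSketch {
  public static void main(String[] args) {
    // Convert a Java String into Hive's writable int representation (IntWritable).
    Converter toWritableInt = ObjectInspectorConverters.getConverter(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
        PrimitiveObjectInspectorFactory.writableIntObjectInspector);
    IntWritable value = (IntWritable) toWritableInt.convert("42");
    System.out.println(value.get()); // prints 42
  }
}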


        if (converters == null) {
          converters = new HashMap<ObjectInspector, Converter>();
        }

        Converter converter = converters.get(oi);
        if (converter == null) {
          converter = ObjectInspectorConverters.getConverter(oi,
              returnObjectInspector);
          converters.put(oi, converter);
        }
        converted = converter.convert(o);
      }
      return converted;
    }
View Full Code Here
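
The excerpt above caches one Converter per distinct input ObjectInspector, so the converter lookup happens once per inspector rather than once per row, and convert(o) is the only per-row cost. A hedged, self-contained sketch of the same caching pattern (the class and method names here are illustrative, not Hive's):

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;

// Hypothetical helper: converts values from any source inspector to one fixed target inspector,
// caching the Converter built for each distinct source inspector.
class CachingConverter {
  private final ObjectInspector targetOI;
  private final Map<ObjectInspector, Converter> converters = new HashMap<ObjectInspector, Converter>();

  CachingConverter(ObjectInspector targetOI) {
    this.targetOI = targetOI;
  }

  Object convert(Object value, ObjectInspector sourceOI) {
    Converter converter = converters.get(sourceOI);
    if (converter == null) {
      converter = ObjectInspectorConverters.getConverter(sourceOI, targetOI);
      converters.put(sourceOI, converter);
    }
    return converter.convert(value);
  }
}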


      // Create the converters
      conversionNeeded = false;
      converters = new Converter[parameterOIs.length];
      for (int i = 0; i < parameterOIs.length; i++) {
        Converter pc = ObjectInspectorConverters.getConverter(parameterOIs[i],
            methodParameterOIs[i]);
        converters[i] = pc;
        // Does any parameter actually need conversion?
        conversionNeeded = conversionNeeded
            || (!(pc instanceof IdentityConverter));
View Full Code Here
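
Here a Converter is built for every parameter up front, and the conversionNeeded flag records whether any of them is something other than an IdentityConverter, so callers can skip conversion entirely in the common identity case. A hedged sketch of how such an array is typically applied at call time (the class and method below are illustrative):

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;

// Illustrative: convert each argument with its matching converter, but only
// when at least one argument actually requires conversion.
final class ConvertArgumentsSketch {
  static Object[] convertArguments(Object[] arguments, Converter[] converters, boolean conversionNeeded) {
    if (!conversionNeeded) {
      return arguments;
    }
    Object[] converted = new Object[arguments.length];
    for (int i = 0; i < arguments.length; i++) {
      converted[i] = converters[i].convert(arguments[i]);
    }
    return converted;
  }
}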

    if (arguments[i].getCategory() != ObjectInspector.Category.PRIMITIVE) {
      throw new UDFArgumentTypeException(i + 1, "Only primitive type arguments are accepted but "
          + arguments[i].getTypeName() + " is passed.");
    }

    Converter converter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[i],
          PrimitiveObjectInspectorFactory.writableStringObjectInspector);

    return converter;
  }
View Full Code Here
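
This is the usual shape of a GenericUDF initialize() step: reject non-primitive arguments, then obtain a Converter that normalizes each input to a common representation, here writable strings (Text). A condensed, hedged sketch of how the stored converter is then used in evaluate() (the UDF below is purely illustrative and omits the argument count and category checks shown above):

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;

// Hypothetical UDF: converts its single argument to Text and upper-cases it.
public class GenericUDFUpperSketch extends GenericUDF {
  private Converter converter;

  @Override
  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    // Argument validation omitted for brevity; see the excerpt above for the category check.
    converter = ObjectInspectorConverters.getConverter(
        (PrimitiveObjectInspector) arguments[0],
        PrimitiveObjectInspectorFactory.writableStringObjectInspector);
    return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  }

  @Override
  public Object evaluate(DeferredObject[] arguments) throws HiveException {
    Text converted = (Text) converter.convert(arguments[0].get());
    return converted == null ? null : new Text(converted.toString().toUpperCase());
  }

  @Override
  public String getDisplayString(String[] children) {
    return "upper_sketch(" + children[0] + ")";
  }
}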

    if (arguments[i].getCategory() != ObjectInspector.Category.PRIMITIVE) {
      throw new UDFArgumentTypeException(i + 1, "Only primitive type arguments are accepted but "
          + arguments[i].getTypeName() + " is passed.");
    }
    PrimitiveCategory inputType = ((PrimitiveObjectInspector) arguments[i]).getPrimitiveCategory();
    Converter converter;
    switch (inputType) {
    case INT:
    case SHORT:
    case BYTE:
      converter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[i],
View Full Code Here

      String skewedValue, ExprNodeColumnDesc keyCol) {
      ObjectInspector inputOI = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(
        TypeInfoFactory.stringTypeInfo);
      ObjectInspector outputOI = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(
        keyCol.getTypeInfo());
      Converter converter = ObjectInspectorConverters.getConverter(inputOI, outputOI);
      Object skewedValueObject = converter.convert(skewedValue);
      return new ExprNodeConstantDesc(keyCol.getTypeInfo(), skewedValueObject);
    }
View Full Code Here
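
The method above converts a skewed value, which Hive stores as a string, into an object of the key column's actual type before wrapping it in an ExprNodeConstantDesc. The same two-inspector conversion in isolation might look like this (the concrete int target type and class name are assumptions for illustration):

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class SkewedValueConversionSketch {
  public static void main(String[] args) {
    // Source: standard Java inspector for string (skewed values arrive as strings).
    ObjectInspector inputOI = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(
        TypeInfoFactory.stringTypeInfo);
    // Target: standard Java inspector for the key column's type, assumed here to be int.
    ObjectInspector outputOI = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(
        TypeInfoFactory.intTypeInfo);
    Converter converter = ObjectInspectorConverters.getConverter(inputOI, outputOI);
    Integer value = (Integer) converter.convert("2012");
    System.out.println(value); // prints 2012
  }
}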

    if (arguments[i].getCategory() != ObjectInspector.Category.PRIMITIVE) {
      throw new UDFArgumentTypeException(i + 1, "Only primitive type arguments are accepted but "
          + arguments[i].getTypeName() + " is passed.");
    }
    PrimitiveCategory inputType = ((PrimitiveObjectInspector) arguments[i]).getPrimitiveCategory();
    Converter converter;
    switch (inputType) {
    case STRING:
    case CHAR:
    case VARCHAR:
      converter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[i],
View Full Code Here
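
The two truncated excerpts that switch on the argument's PrimitiveCategory follow the same pattern: pick a target ObjectInspector based on the input's category, then build the Converter for it. A compact, hedged reconstruction of that pattern (the category-to-inspector mapping and class name below are illustrative, not taken from the excerpted UDFs):

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

// Illustrative: choose the converter's target inspector by the argument's primitive category.
final class ConverterByCategorySketch {
  static Converter converterFor(PrimitiveObjectInspector argOI) {
    ObjectInspector targetOI;
    switch (argOI.getPrimitiveCategory()) {
    case BYTE:
    case SHORT:
    case INT:
      targetOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
      break;
    case STRING:
    case CHAR:
    case VARCHAR:
      targetOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
      break;
    default:
      targetOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
      break;
    }
    return ObjectInspectorConverters.getConverter(argOI, targetOI);
  }
}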

        if (type == null) {
          throw new SemanticException("Column " + key + " not found");
        }
        // Create the corresponding hive expression to filter on partition columns.
        PrimitiveTypeInfo pti = TypeInfoFactory.getPrimitiveTypeInfo(type);
        Converter converter = ObjectInspectorConverters.getConverter(
          TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(TypeInfoFactory.stringTypeInfo),
          TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(pti));
        ExprNodeColumnDesc column = new ExprNodeColumnDesc(pti, key, null, true);
        ExprNodeGenericFuncDesc op = makeBinaryPredicate(
            operator, column, new ExprNodeConstantDesc(pti, converter.convert(val)));
        // If it's multi-expr filter (e.g. a='5', b='2012-01-02'), AND with previous exprs.
        expr = (expr == null) ? op : makeBinaryPredicate("and", expr, op);
        names.add(key);
      }
      if (expr == null) continue;
View Full Code Here

