Package org.apache.hadoop.hive.serde2.typeinfo

Examples of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo


    /**
     * Convert a Hive FieldSchema into its HCatFieldSchema equivalent.
     *
     * @return HCatFieldSchema representation of FieldSchema
     * @throws HCatException
     */
    public static HCatFieldSchema getHCatFieldSchema(FieldSchema fs) throws HCatException {
        String fieldName = fs.getName();
        TypeInfo baseTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(fs.getType());
        return getHCatFieldSchema(fieldName, baseTypeInfo);
    }
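The snippet above turns the type string stored in a FieldSchema into a TypeInfo via TypeInfoUtils. A minimal, hedged sketch of that parsing step on its own (the type strings and class name are illustrative only):

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class TypeStringDemo {
  public static void main(String[] args) {
    // Parse Hive type strings into TypeInfo instances.
    TypeInfo primitive = TypeInfoUtils.getTypeInfoFromTypeString("bigint");
    TypeInfo list = TypeInfoUtils.getTypeInfoFromTypeString("array<string>");

    // getCategory() distinguishes PRIMITIVE from LIST/MAP/STRUCT/UNION.
    System.out.println(primitive.getTypeName() + " -> " + primitive.getCategory());
    System.out.println(list.getTypeName() + " -> " + list.getCategory());
  }
}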


            + "contact:struct<phno:string,email:string>,"
            + "currently_registered_courses:array<string>,"
            + "current_grades:map<string,string>,"
            + "phnos:array<struct<phno:string,type:string>>,blah:array<int>>";

        TypeInfo ti = TypeInfoUtils.getTypeInfoFromTypeString(typeString);

        HCatSchema hsch = HCatSchemaUtils.getHCatSchemaFromTypeString(typeString);
        System.out.println(ti.getTypeName());
        System.out.println(hsch.toString());
        assertEquals(ti.getTypeName(),hsch.toString());
        assertEquals(hsch.toString(),typeString);
    }
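Because the test asserts that the parsed TypeInfo's name round-trips to the original type string, it helps to see how a parsed struct can be walked. A small sketch using the contact sub-struct from the type string above:

import java.util.List;
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class StructWalkDemo {
  public static void main(String[] args) {
    String typeString = "struct<phno:string,email:string>";
    StructTypeInfo struct =
        (StructTypeInfo) TypeInfoUtils.getTypeInfoFromTypeString(typeString);

    // Field names and field TypeInfos are kept in parallel lists.
    List<String> names = struct.getAllStructFieldNames();
    List<TypeInfo> types = struct.getAllStructFieldTypeInfos();
    for (int i = 0; i < names.size(); i++) {
      System.out.println(names.get(i) + " : " + types.get(i).getTypeName());
    }
  }
}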

      // Put partial aggregation results in reduceValues
      int inputField = reduceKeys.size();

      for (Map.Entry<String, ASTNode> entry : aggregationTrees.entrySet()) {

        TypeInfo type = reduceSinkInputRowResolver.getColumnInfos().get(
            inputField).getType();
        reduceValues.add(new ExprNodeColumnDesc(type,
            getColumnInternalName(inputField), "", false));
        inputField++;
        outputValueColumnNames.add(getColumnInternalName(reduceValues.size() - 1));
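In the planner code above, each partial-aggregation value becomes an ExprNodeColumnDesc carrying the column's TypeInfo. A hedged, stand-alone sketch of that constructor (the internal name _col0 is illustrative):

import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class ExprNodeColumnDemo {
  public static void main(String[] args) {
    TypeInfo type = TypeInfoFactory.doubleTypeInfo;
    // (type, internal column name, table alias, isPartitionColOrVirtualCol)
    ExprNodeColumnDesc col = new ExprNodeColumnDesc(type, "_col0", "", false);
    System.out.println(col.getTypeInfo().getTypeName() + " " + col.getColumn());
  }
}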

    List<ASTNode> grpByExprs = getGroupByForClause(parseInfo, dest);
    for (int i = 0; i < grpByExprs.size(); ++i) {
      ASTNode grpbyExpr = grpByExprs.get(i);
      String field = getColumnInternalName(i);
      outputColumnNames.add(field);
      TypeInfo typeInfo = reduceSinkInputRowResolver2.getExpression(
          grpbyExpr).getType();
      ExprNodeColumnDesc inputExpr = new ExprNodeColumnDesc(typeInfo, field,
          "", false);
      reduceKeys.add(inputExpr);
      ColumnInfo colInfo = new ColumnInfo(Utilities.ReduceField.KEY.toString()
          + "." + field, typeInfo, "", false);
      reduceSinkOutputRowResolver2.putExpression(grpbyExpr, colInfo);
      colExprMap.put(colInfo.getInternalName(), inputExpr);
    }
    // Get partial aggregation results and store in reduceValues
    ArrayList<ExprNodeDesc> reduceValues = new ArrayList<ExprNodeDesc>();
    int inputField = reduceKeys.size();
    HashMap<String, ASTNode> aggregationTrees = parseInfo
        .getAggregationExprsForClause(dest);
    for (Map.Entry<String, ASTNode> entry : aggregationTrees.entrySet()) {
      String field = getColumnInternalName(inputField);
      ASTNode t = entry.getValue();
      TypeInfo typeInfo = reduceSinkInputRowResolver2.getExpression(t)
          .getType();
      reduceValues.add(new ExprNodeColumnDesc(typeInfo, field, "", false));
      inputField++;
      String col = getColumnInternalName(reduceValues.size() - 1);
      outputColumnNames.add(col);
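The same snippet registers each reduce-side column as a ColumnInfo keyed by a KEY.-prefixed internal name plus its TypeInfo. A minimal sketch, with an invented internal name:

import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class ColumnInfoDemo {
  public static void main(String[] args) {
    // (internal name, TypeInfo, table alias, isVirtualCol)
    ColumnInfo colInfo = new ColumnInfo(
        Utilities.ReduceField.KEY.toString() + "._col0",
        TypeInfoFactory.stringTypeInfo, "", false);
    System.out.println(colInfo.getInternalName() + " : "
        + colInfo.getType().getTypeName());
  }
}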

      // Here we only deal with non-partition columns; partition columns are handled next.
      for (int i = 0; i < columnNumber; i++) {
        ObjectInspector tableFieldOI = tableFields.get(i)
            .getFieldObjectInspector();
        TypeInfo tableFieldTypeInfo = TypeInfoUtils
            .getTypeInfoFromObjectInspector(tableFieldOI);
        TypeInfo rowFieldTypeInfo = rowFields.get(i).getType();
        ExprNodeDesc column = new ExprNodeColumnDesc(rowFieldTypeInfo,
            rowFields.get(i).getInternalName(), "", false);
        // LazySimpleSerDe can convert any type to the String type using
        // JSON format.
        if (!tableFieldTypeInfo.equals(rowFieldTypeInfo)
            && !(isLazySimpleSerDe
            && tableFieldTypeInfo.getCategory().equals(Category.PRIMITIVE) && tableFieldTypeInfo
            .equals(TypeInfoFactory.stringTypeInfo))) {
          // need to do some conversions here
          converted = true;
          if (tableFieldTypeInfo.getCategory() != Category.PRIMITIVE) {
            // cannot convert to complex types
            column = null;
          } else {
            column = TypeCheckProcFactory.DefaultExprProcessor
                .getFuncExprNodeDesc(tableFieldTypeInfo.getTypeName(),
                    column);
          }
          if (column == null) {
            String reason = "Cannot convert column " + i + " from "
                + rowFieldTypeInfo + " to " + tableFieldTypeInfo + ".";
            throw new SemanticException(ErrorMsg.TARGET_TABLE_COLUMN_MISMATCH
                .getMsg(qb.getParseInfo().getDestForClause(dest), reason));
          }
        }
        expressions.add(column);
      }
    }

    // deal with dynamic partition columns: convert ExprNodeDesc type to String??
    if (dynPart && dpCtx != null && dpCtx.getNumDPCols() > 0) {
      // DP columns start at index tableFields.size()
      for (int i = tableFields.size(); i < rowFields.size(); ++i ) {
        TypeInfo rowFieldTypeInfo = rowFields.get(i).getType();
        ExprNodeDesc column = new ExprNodeColumnDesc(
            rowFieldTypeInfo, rowFields.get(i).getInternalName(), "", false);
        expressions.add(column);
      }
      // converted = true; // [TODO]: should we check & convert type to String and set it to true?
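The conversion check above derives a TypeInfo from each table field's ObjectInspector and compares it by value against the row field's TypeInfo. A short sketch of that derivation using a standard Java string ObjectInspector:

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class OiToTypeInfoDemo {
  public static void main(String[] args) {
    ObjectInspector stringOI = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
    TypeInfo fromOI = TypeInfoUtils.getTypeInfoFromObjectInspector(stringOI);

    // TypeInfo equality is value based, so this matches the factory constant.
    System.out.println(fromOI.equals(TypeInfoFactory.stringTypeInfo)); // true
  }
}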

    // Check column type
    int columnNumber = posns.size();
    ArrayList<ExprNodeDesc> expressions = new ArrayList<ExprNodeDesc>(columnNumber);
    for (Integer posn: posns) {
      ObjectInspector tableFieldOI = tableFields.get(posn).getFieldObjectInspector();
      TypeInfo tableFieldTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(tableFieldOI);
      TypeInfo rowFieldTypeInfo = rowFields.get(posn).getType();
      ExprNodeDesc column = new ExprNodeColumnDesc(rowFieldTypeInfo, rowFields.get(posn).getInternalName(),
                                                   rowFields.get(posn).getTabAlias(), rowFields.get(posn).getIsVirtualCol());

      if (convert && !tableFieldTypeInfo.equals(rowFieldTypeInfo)) {
        // need to do some conversions here
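The reverse direction works too: a TypeInfo can be turned back into a standard ObjectInspector, and the round trip preserves the type. A hedged sketch:

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class TypeInfoRoundTripDemo {
  public static void main(String[] args) {
    TypeInfo original = TypeInfoUtils.getTypeInfoFromTypeString("map<string,int>");
    ObjectInspector oi =
        TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(original);
    TypeInfo roundTripped = TypeInfoUtils.getTypeInfoFromObjectInspector(oi);
    System.out.println(original.equals(roundTripped)); // true
  }
}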

      keys.add(now.getKeyCols());
    }
    // implicit type conversion hierarchy
    for (int k = 0; k < keyLength; k++) {
      // Find the common class for type conversion
      TypeInfo commonType = keys.get(0).get(k).getTypeInfo();
      for (int i = 1; i < right.length; i++) {
        TypeInfo a = commonType;
        TypeInfo b = keys.get(i).get(k).getTypeInfo();
        commonType = FunctionRegistry.getCommonClassForComparison(a, b);
        if (commonType == null) {
          throw new SemanticException(
              "Cannot do equality join on different types: " + a.getTypeName()
              + " and " + b.getTypeName());
        }
      }
      // Add implicit type conversion if necessary
      for (int i = 0; i < right.length; i++) {
        if (!commonType.equals(keys.get(i).get(k).getTypeInfo())) {
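The join code above folds getCommonClassForComparison over every branch's key type to find a single comparison type, failing if none exists. A compact sketch of that fold over a few primitive key types (a numeric widening is expected, but the result is printed rather than assumed):

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class CommonJoinKeyTypeDemo {
  public static void main(String[] args) {
    List<TypeInfo> keyTypes = Arrays.asList(
        TypeInfoFactory.intTypeInfo,
        TypeInfoFactory.longTypeInfo,
        TypeInfoFactory.doubleTypeInfo);

    TypeInfo common = keyTypes.get(0);
    for (int i = 1; i < keyTypes.size() && common != null; i++) {
      common = FunctionRegistry.getCommonClassForComparison(common, keyTypes.get(i));
    }
    // A null result would mean the keys cannot be compared at all.
    System.out.println(common == null ? "no common type" : common.getTypeName());
  }
}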

            + ", and on the right side of the UNION at column position: " +
            getPositionFromInternalName(rInfo.getInternalName())
            + ". Column positions should match for a UNION"));
      }
      // try widening conversion, otherwise fail the union
      TypeInfo commonTypeInfo = FunctionRegistry.getCommonClassForComparison(lInfo.getType(),
          rInfo.getType());
      if (commonTypeInfo == null) {
        throw new SemanticException(generateErrorMessage(tabref,
            "Schema of both sides of union should match: Column " + field
            + " is of type " + lInfo.getType().getTypeName()
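For UNION, the same widening is attempted column by column, and a null result means the two schemas cannot be reconciled. A hedged sketch walking two struct schemas positionally (field names are illustrative):

import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class UnionWideningDemo {
  public static void main(String[] args) {
    StructTypeInfo left = (StructTypeInfo)
        TypeInfoUtils.getTypeInfoFromTypeString("struct<id:int,score:float>");
    StructTypeInfo right = (StructTypeInfo)
        TypeInfoUtils.getTypeInfoFromTypeString("struct<id:bigint,score:double>");

    for (int i = 0; i < left.getAllStructFieldTypeInfos().size(); i++) {
      TypeInfo l = left.getAllStructFieldTypeInfos().get(i);
      TypeInfo r = right.getAllStructFieldTypeInfos().get(i);
      // Widen only when the two sides differ; null means the union should fail.
      TypeInfo common = l.equals(r) ? l : FunctionRegistry.getCommonClassForComparison(l, r);
      System.out.println(left.getAllStructFieldNames().get(i) + " -> "
          + (common == null ? "incompatible" : common.getTypeName()));
    }
  }
}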

        ExprNodeConstantDesc fieldName = (ExprNodeConstantDesc) children.get(1);
        assert (fieldName.getValue() instanceof String);

        // Calculate result TypeInfo
        String fieldNameString = (String) fieldName.getValue();
        TypeInfo objectTypeInfo = object.getTypeInfo();

        // Allow accessing a field of list element structs directly from a list
        boolean isList = (object.getTypeInfo().getCategory() == ObjectInspector.Category.LIST);
        if (isList) {
          objectTypeInfo = ((ListTypeInfo) objectTypeInfo)
              .getListElementTypeInfo();
        }
        if (objectTypeInfo.getCategory() != Category.STRUCT) {
          throw new SemanticException(ErrorMsg.INVALID_DOT.getMsg(expr));
        }
        TypeInfo t = ((StructTypeInfo) objectTypeInfo)
            .getStructFieldTypeInfo(fieldNameString);
        if (isList) {
          t = TypeInfoFactory.getListTypeInfo(t);
        }

        desc = new ExprNodeFieldDesc(t, children.get(0), fieldNameString,
            isList);

      } else if (funcText.equals("[")) {
        // "[]" : LSQUARE/INDEX Expression
        assert (children.size() == 2);

        // Check whether this is a list or a map
        TypeInfo myt = children.get(0).getTypeInfo();

        if (myt.getCategory() == Category.LIST) {
          // Only allow integer index for now
          if (!(children.get(1) instanceof ExprNodeConstantDesc)
              || !(((ExprNodeConstantDesc) children.get(1)).getTypeInfo()
              .equals(TypeInfoFactory.intTypeInfo))) {
            throw new SemanticException(SemanticAnalyzer.generateErrorMessage(
                  expr,
                  ErrorMsg.INVALID_ARRAYINDEX_CONSTANT.getMsg()));
          }

          // Calculate TypeInfo
          TypeInfo t = ((ListTypeInfo) myt).getListElementTypeInfo();
          desc = new ExprNodeGenericFuncDesc(t, FunctionRegistry
              .getGenericUDFForIndex(), children);
        } else if (myt.getCategory() == Category.MAP) {
          // Only allow constant map key for now
          if (!(children.get(1) instanceof ExprNodeConstantDesc)) {
            throw new SemanticException(SemanticAnalyzer.generateErrorMessage(
                  expr,
                  ErrorMsg.INVALID_MAPINDEX_CONSTANT.getMsg()));
          }
          if (!(((ExprNodeConstantDesc) children.get(1)).getTypeInfo()
              .equals(((MapTypeInfo) myt).getMapKeyTypeInfo()))) {
            throw new SemanticException(ErrorMsg.INVALID_MAPINDEX_TYPE
                .getMsg(expr));
          }
          // Calculate TypeInfo
          TypeInfo t = ((MapTypeInfo) myt).getMapValueTypeInfo();
          desc = new ExprNodeGenericFuncDesc(t, FunctionRegistry
              .getGenericUDFForIndex(), children);
        } else {
          throw new SemanticException(ErrorMsg.NON_COLLECTION_TYPE.getMsg(expr,
              myt.getTypeName()));
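The "." and "[]" handling above is largely TypeInfo navigation: lists expose an element type, maps a key and value type, and structs a per-field type. A sketch drilling into a nested type built from one of the shapes used earlier on this page:

import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class NestedTypeInfoDemo {
  public static void main(String[] args) {
    TypeInfo t = TypeInfoUtils.getTypeInfoFromTypeString(
        "map<string,array<struct<phno:string,type:string>>>");

    MapTypeInfo map = (MapTypeInfo) t;
    System.out.println("key:   " + map.getMapKeyTypeInfo().getTypeName());

    ListTypeInfo list = (ListTypeInfo) map.getMapValueTypeInfo();
    StructTypeInfo element = (StructTypeInfo) list.getListElementTypeInfo();
    System.out.println("field: " + element.getStructFieldTypeInfo("phno").getTypeName());
  }
}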

  static Map<TypeInfo, Integer> numericTypes = new HashMap<TypeInfo, Integer>();
  static List<TypeInfo> numericTypeList = new ArrayList<TypeInfo>();

  static void registerNumericType(String typeName, int level) {
    TypeInfo t = TypeInfoFactory.getPrimitiveTypeInfo(typeName);
    numericTypeList.add(t);
    numericTypes.put(t, level);
  }
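TypeInfo instances compare by value (and TypeInfoFactory caches primitive instances), so they work as HashMap keys, which is exactly what registerNumericType relies on. A minimal sketch:

import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class NumericLevelDemo {
  public static void main(String[] args) {
    Map<TypeInfo, Integer> numericTypes = new HashMap<TypeInfo, Integer>();
    numericTypes.put(TypeInfoFactory.getPrimitiveTypeInfo("tinyint"), 1);
    numericTypes.put(TypeInfoFactory.getPrimitiveTypeInfo("int"), 3);

    // A fresh lookup with an equal TypeInfo finds the registered level.
    System.out.println(numericTypes.get(TypeInfoFactory.intTypeInfo)); // 3
  }
}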
