Package org.apache.hadoop.hive.serde2.typeinfo

Examples of org.apache.hadoop.hive.serde2.typeinfo.TypeInfo


          astKeyName = stripIdentifierQuotes(astKeyName);
        }
        String colType = partCols.get(astKeyName);
        ObjectInspector inputOI = astExprNodePair.getValue().getWritableObjectInspector();

        TypeInfo expectedType =
            TypeInfoUtils.getTypeInfoFromTypeString(colType);
        ObjectInspector outputOI =
            TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(expectedType);
        Object value = null;
        try {
View Full Code Here
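
The fragment above resolves a partition column's declared type string into a TypeInfo and then derives a standard writable ObjectInspector from it. Below is a minimal, self-contained sketch of those two TypeInfoUtils calls; the class name TypeStringDemo and the map<string,int> type string are illustrative, not taken from the snippet.

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class TypeStringDemo {
  public static void main(String[] args) {
    // Parse a Hive type string into its TypeInfo representation.
    TypeInfo expectedType = TypeInfoUtils.getTypeInfoFromTypeString("map<string,int>");
    // Derive the standard writable ObjectInspector for that type.
    ObjectInspector outputOI =
        TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(expectedType);
    System.out.println(expectedType.getTypeName()); // map<string,int>
    System.out.println(outputOI.getCategory());     // MAP
  }
}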


      return 0;
    }
    if (argumentPassed.getCategory().equals(Category.LIST)
        && argumentAccepted.getCategory().equals(Category.LIST)) {
      // lists are compatible if and only if their elements are compatible
      TypeInfo argumentPassedElement = ((ListTypeInfo) argumentPassed)
          .getListElementTypeInfo();
      TypeInfo argumentAcceptedElement = ((ListTypeInfo) argumentAccepted)
          .getListElementTypeInfo();
      return matchCost(argumentPassedElement, argumentAcceptedElement, exact);
    }
    if (argumentPassed.getCategory().equals(Category.MAP)
        && argumentAccepted.getCategory().equals(Category.MAP)) {
      // maps are compatible if and only if their keys and values are compatible
      TypeInfo argumentPassedKey = ((MapTypeInfo) argumentPassed)
          .getMapKeyTypeInfo();
      TypeInfo argumentAcceptedKey = ((MapTypeInfo) argumentAccepted)
          .getMapKeyTypeInfo();
      TypeInfo argumentPassedValue = ((MapTypeInfo) argumentPassed)
          .getMapValueTypeInfo();
      TypeInfo argumentAcceptedValue = ((MapTypeInfo) argumentAccepted)
          .getMapValueTypeInfo();
      int cost1 = matchCost(argumentPassedKey, argumentAcceptedKey, exact);
      int cost2 = matchCost(argumentPassedValue, argumentAcceptedValue, exact);
      if (cost1 < 0 || cost2 < 0) {
        return -1;
View Full Code Here
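
The matchCost fragment recurses by unwrapping ListTypeInfo and MapTypeInfo into their element, key, and value TypeInfos. Here is a small sketch of that decomposition using TypeInfoFactory; the NestedTypeDemo class and the chosen element types are hypothetical.

import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class NestedTypeDemo {
  public static void main(String[] args) {
    ListTypeInfo list = (ListTypeInfo)
        TypeInfoFactory.getListTypeInfo(TypeInfoFactory.intTypeInfo);
    MapTypeInfo map = (MapTypeInfo) TypeInfoFactory.getMapTypeInfo(
        TypeInfoFactory.stringTypeInfo, TypeInfoFactory.doubleTypeInfo);

    // These element/key/value TypeInfos are what matchCost recurses on.
    TypeInfo element = list.getListElementTypeInfo();
    TypeInfo key = map.getMapKeyTypeInfo();
    TypeInfo value = map.getMapValueTypeInfo();
    System.out.println(element.getTypeName()); // int
    System.out.println(key.getTypeName());     // string
    System.out.println(value.getTypeName());   // double
  }
}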

        List<TypeInfo> argumentsAccepted =
            TypeInfoUtils.getParameterTypeInfos(m, argumentsPassed.size());
        Iterator<TypeInfo> argsPassedIter = argumentsPassed.iterator();
        for (TypeInfo acceptedType : argumentsAccepted) {
          // Check the affinity of the argument passed in with the accepted argument,
          // based on the PrimitiveGrouping
          TypeInfo passedType = argsPassedIter.next();
          if (acceptedType.getCategory() == Category.PRIMITIVE
              && passedType.getCategory() == Category.PRIMITIVE) {
            PrimitiveGrouping acceptedPg = PrimitiveObjectInspectorUtils.getPrimitiveGrouping(
                ((PrimitiveTypeInfo) acceptedType).getPrimitiveCategory());
            PrimitiveGrouping passedPg = PrimitiveObjectInspectorUtils.getPrimitiveGrouping(
                ((PrimitiveTypeInfo) passedType).getPrimitiveCategory());
            if (acceptedPg == passedPg) {
View Full Code Here
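
The affinity check above maps each primitive type to its PrimitiveGrouping and compares groups rather than exact types. A minimal sketch of that comparison (the class name PrimitiveGroupingDemo is mine; int and bigint were picked because both fall in NUMERIC_GROUP):

import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class PrimitiveGroupingDemo {
  public static void main(String[] args) {
    PrimitiveTypeInfo intType =
        (PrimitiveTypeInfo) TypeInfoUtils.getTypeInfoFromTypeString("int");
    PrimitiveTypeInfo longType =
        (PrimitiveTypeInfo) TypeInfoUtils.getTypeInfoFromTypeString("bigint");
    // Types match loosely when their PrimitiveGroupings coincide.
    PrimitiveGrouping a =
        PrimitiveObjectInspectorUtils.getPrimitiveGrouping(intType.getPrimitiveCategory());
    PrimitiveGrouping b =
        PrimitiveObjectInspectorUtils.getPrimitiveGrouping(longType.getPrimitiveCategory());
    System.out.println(a == b); // true: both NUMERIC_GROUP
  }
}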

        }

        Iterator<TypeInfo> referenceIterator = referenceArguments.iterator();

        for (TypeInfo accepted: argumentsAccepted) {
          TypeInfo reference = referenceIterator.next();

          boolean acceptedIsPrimitive = false;
          PrimitiveCategory acceptedPrimCat = PrimitiveCategory.UNKNOWN;
          if (accepted.getCategory() == Category.PRIMITIVE) {
            acceptedIsPrimitive = true;
View Full Code Here
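
The fragment above defaults acceptedPrimCat to UNKNOWN and only reads a PrimitiveCategory after confirming the TypeInfo is in the PRIMITIVE category. A short sketch of that guard pattern (class name mine; array<int> chosen as an arbitrary non-primitive type):

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class AcceptedCategoryDemo {
  public static void main(String[] args) {
    TypeInfo accepted = TypeInfoUtils.getTypeInfoFromTypeString("array<int>");
    // Same defaulting pattern as the fragment: UNKNOWN unless proven primitive.
    PrimitiveCategory acceptedPrimCat = PrimitiveCategory.UNKNOWN;
    if (accepted.getCategory() == Category.PRIMITIVE) {
      acceptedPrimCat = ((PrimitiveTypeInfo) accepted).getPrimitiveCategory();
    }
    System.out.println(acceptedPrimCat); // UNKNOWN: array<int> is not primitive
  }
}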

        return null;
      }
      return "Constant " + value + " is not string "
        + (isIntegralSupported ? "or integral ": "") + "type: " + expr.getTypeInfo().getTypeName();
    } else if (isCol) {
      TypeInfo type = expr.getTypeInfo();
      if (type.getTypeName().equals(serdeConstants.STRING_TYPE_NAME)
          || (isIntegralSupported && isIntegralType(type.getTypeName()))) {
        String colName = ((ExprNodeColumnDesc)expr).getColumn();
        for (FieldSchema fs: tab.getPartCols()) {
          if (fs.getName().equals(colName)) {
            if (fs.getType().equals(serdeConstants.STRING_TYPE_NAME)
                || (isIntegralSupported && isIntegralType(fs.getType()))) {
              return null;
            }
            return "Partition column " + fs.getName() + " is not string "
              + (isIntegralSupported ? "or integral ": "") + "type: " + fs.getType();
          }
        }
        assert(false); // cannot find the partition column!
      } else {
        return "Column " + expr.getExprString() + " is not string "
          + (isIntegralSupported ? "or integral ": "") + "type: " + type.getTypeName();
      }
    } else if (expr instanceof ExprNodeGenericFuncDesc) {
      ExprNodeGenericFuncDesc funcDesc = (ExprNodeGenericFuncDesc) expr;
      GenericUDF func = funcDesc.getGenericUDF();
      if (!supportedJDOFuncs(func)) {
View Full Code Here
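
The check above compares type names against serdeConstants to decide whether a predicate over a partition column can be pushed down to the metastore. A hedged sketch of just the type-name comparison (class name mine; supportedJDOFuncs and the surrounding analyzer state are not reproduced here):

import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class PushdownTypeCheck {
  public static void main(String[] args) {
    TypeInfo type = TypeInfoFactory.stringTypeInfo;
    // The pushdown test reduces to a type-name comparison like this one.
    boolean isString = type.getTypeName().equals(serdeConstants.STRING_TYPE_NAME);
    System.out.println(isString); // true
  }
}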

        }
      });
      walker.startWalking(Collections.singletonList(expression), null);
    }
    for (FieldSchema argument : arguments) {
      TypeInfo colType =
          TypeInfoUtils.getTypeInfoFromTypeString(argument.getType());
      rowResolver.put("", argument.getName(),
          new ColumnInfo(argument.getName(), colType, "", false));
      macroColNames.add(argument.getName());
      macroColTypes.add(colType);
View Full Code Here
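
The macro-argument loop above turns each FieldSchema's type string into a TypeInfo before registering the column. A self-contained sketch of that per-argument conversion, assuming the thrift-generated metastore FieldSchema class (the argument names and types are made up):

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class MacroArgTypes {
  public static void main(String[] args) {
    List<FieldSchema> arguments = Arrays.asList(
        new FieldSchema("x", "int", null),
        new FieldSchema("y", "array<string>", null));
    for (FieldSchema argument : arguments) {
      // Same conversion as the fragment: type string -> TypeInfo.
      TypeInfo colType =
          TypeInfoUtils.getTypeInfoFromTypeString(argument.getType());
      System.out.println(argument.getName() + " -> " + colType.getTypeName());
    }
  }
}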

      // Put partial aggregation results in reduceValues
      int inputField = reduceKeys.size();

      for (Map.Entry<String, ASTNode> entry : aggregationTrees.entrySet()) {

        TypeInfo type = reduceSinkInputRowResolver.getColumnInfos().get(
            inputField).getType();
        ExprNodeColumnDesc exprDesc = new ExprNodeColumnDesc(type,
            getColumnInternalName(inputField), "", false);
        reduceValues.add(exprDesc);
        inputField++;
View Full Code Here
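
The fragment above reads a column's TypeInfo out of the row resolver and wraps it in an ExprNodeColumnDesc. A minimal sketch of that wrapping step, with a hand-built TypeInfo standing in for the resolver lookup (the column name _col0 and the class name are illustrative):

import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class ColumnDescDemo {
  public static void main(String[] args) {
    // A bigint column named "_col0", empty table alias, not a partition/virtual column.
    TypeInfo type = TypeInfoFactory.longTypeInfo;
    ExprNodeColumnDesc exprDesc = new ExprNodeColumnDesc(type, "_col0", "", false);
    System.out.println(exprDesc.getExprString());
  }
}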

    List<ASTNode> grpByExprs = getGroupByForClause(parseInfo, dest);
    for (int i = 0; i < grpByExprs.size(); ++i) {
      ASTNode grpbyExpr = grpByExprs.get(i);
      String field = getColumnInternalName(i);
      outputColumnNames.add(field);
      TypeInfo typeInfo = reduceSinkInputRowResolver2.getExpression(
          grpbyExpr).getType();
      ExprNodeColumnDesc inputExpr = new ExprNodeColumnDesc(typeInfo, field,
          "", false);
      reduceKeys.add(inputExpr);
      ColumnInfo colInfo = new ColumnInfo(Utilities.ReduceField.KEY.toString()
          + "." + field, typeInfo, "", false);
      reduceSinkOutputRowResolver2.putExpression(grpbyExpr, colInfo);
      colExprMap.put(colInfo.getInternalName(), inputExpr);
    }

    // add a key for reduce sink
    if (groupingSetsPresent) {
      // Note that partitioning fields don't need to change, since the data is
      // either partitioned randomly, or by all grouping keys + distinct keys
      processGroupingSetReduceSinkOperator(
          reduceSinkInputRowResolver2,
          reduceSinkOutputRowResolver2,
          reduceKeys,
          outputColumnNames,
          colExprMap);
    }

    // Get partial aggregation results and store in reduceValues
    ArrayList<ExprNodeDesc> reduceValues = new ArrayList<ExprNodeDesc>();
    int inputField = reduceKeys.size();
    HashMap<String, ASTNode> aggregationTrees = parseInfo
        .getAggregationExprsForClause(dest);
    for (Map.Entry<String, ASTNode> entry : aggregationTrees.entrySet()) {
      String field = getColumnInternalName(inputField);
      ASTNode t = entry.getValue();
      TypeInfo typeInfo = reduceSinkInputRowResolver2.getExpression(t)
          .getType();
      ExprNodeColumnDesc exprDesc = new ExprNodeColumnDesc(typeInfo, field, "", false);
      reduceValues.add(exprDesc);
      inputField++;
      String col = getColumnInternalName(reduceValues.size() - 1);
View Full Code Here
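
Illustrative only: the reduce keys and values assembled above amount to a struct of internal column names paired with TypeInfos. TypeInfoFactory.getStructTypeInfo can model that shape directly; the _col0/_col1 names below mimic getColumnInternalName output and the class is hypothetical.

import java.util.Arrays;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class ReduceRowShape {
  public static void main(String[] args) {
    // Model a two-column reduce row: a string key and a bigint partial aggregate.
    TypeInfo rowType = TypeInfoFactory.getStructTypeInfo(
        Arrays.asList("_col0", "_col1"),
        Arrays.<TypeInfo>asList(
            TypeInfoFactory.stringTypeInfo, TypeInfoFactory.longTypeInfo));
    System.out.println(rowType.getTypeName()); // struct<_col0:string,_col1:bigint>
  }
}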

      // This deals only with non-partition columns; partition columns are handled next.
      for (int i = 0; i < columnNumber; i++) {
        ObjectInspector tableFieldOI = tableFields.get(i)
            .getFieldObjectInspector();
        TypeInfo tableFieldTypeInfo = TypeInfoUtils
            .getTypeInfoFromObjectInspector(tableFieldOI);
        TypeInfo rowFieldTypeInfo = rowFields.get(i).getType();
        ExprNodeDesc column = new ExprNodeColumnDesc(rowFieldTypeInfo,
            rowFields.get(i).getInternalName(), "", false, rowFields.get(i).isSkewedCol());
        // LazySimpleSerDe can convert any type to String using its
        // JSON format.
        if (!tableFieldTypeInfo.equals(rowFieldTypeInfo)
            && !(isLazySimpleSerDe
                && tableFieldTypeInfo.getCategory().equals(Category.PRIMITIVE) && tableFieldTypeInfo
                  .equals(TypeInfoFactory.stringTypeInfo))) {
          // need to do some conversions here
          converted = true;
          if (tableFieldTypeInfo.getCategory() != Category.PRIMITIVE) {
            // cannot convert to complex types
            column = null;
          } else {
            column = ParseUtils.createConversionCast(
                column, (PrimitiveTypeInfo)tableFieldTypeInfo);
          }
          if (column == null) {
            String reason = "Cannot convert column " + i + " from "
                + rowFieldTypeInfo + " to " + tableFieldTypeInfo + ".";
            throw new SemanticException(ErrorMsg.TARGET_TABLE_COLUMN_MISMATCH
                .getMsg(qb.getParseInfo().getDestForClause(dest), reason));
          }
        }
        expressions.add(column);
      }
    }

    // Deal with dynamic partition columns: should the ExprNodeDesc type be converted to String?
    if (dynPart && dpCtx != null && dpCtx.getNumDPCols() > 0) {
      // DP columns start at index tableFields.size()
      for (int i = tableFields.size(); i < rowFields.size(); ++i) {
        TypeInfo rowFieldTypeInfo = rowFields.get(i).getType();
        ExprNodeDesc column = new ExprNodeColumnDesc(
            rowFieldTypeInfo, rowFields.get(i).getInternalName(), "", false);
        expressions.add(column);
      }
      // converted = true; // [TODO]: should we check & convert type to String and set it to true?
View Full Code Here
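
The conversion logic above hinges on TypeInfo.equals(): a cast is inserted only when the row field's TypeInfo differs from the table field's. A minimal sketch of that equality check (class name mine; the identity observation is how the factory behaves in practice, not a documented contract):

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class TypeEqualityDemo {
  public static void main(String[] args) {
    TypeInfo a = TypeInfoUtils.getTypeInfoFromTypeString("string");
    TypeInfo b = TypeInfoFactory.stringTypeInfo;
    System.out.println(a.equals(b)); // true: same Hive type, no cast needed
    System.out.println(a == b);      // typically also true: the factory caches instances
  }
}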

    // Check column type
    int columnNumber = posns.size();
    ArrayList<ExprNodeDesc> expressions = new ArrayList<ExprNodeDesc>(columnNumber);
    for (Integer posn : posns) {
      ObjectInspector tableFieldOI = tableFields.get(posn).getFieldObjectInspector();
      TypeInfo tableFieldTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(tableFieldOI);
      TypeInfo rowFieldTypeInfo = rowFields.get(posn).getType();
      ExprNodeDesc column = new ExprNodeColumnDesc(rowFieldTypeInfo,
          rowFields.get(posn).getInternalName(), rowFields.get(posn).getTabAlias(),
          rowFields.get(posn).getIsVirtualCol());

      if (convert && !tableFieldTypeInfo.equals(rowFieldTypeInfo)) {
View Full Code Here
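
The last fragment maps an ObjectInspector back to a TypeInfo with TypeInfoUtils.getTypeInfoFromObjectInspector. A minimal round-trip sketch (class name mine):

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class OiToTypeInfo {
  public static void main(String[] args) {
    // Start from a standard primitive ObjectInspector...
    ObjectInspector oi = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
    // ...and recover the TypeInfo describing its type.
    TypeInfo ti = TypeInfoUtils.getTypeInfoFromObjectInspector(oi);
    System.out.println(ti.getTypeName()); // string
  }
}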
