Package: org.apache.drill.common.types.TypeProtos

Examples of org.apache.drill.common.types.TypeProtos.MajorType


    }


    private LogicalExpression getDrillCastFunctionFromOptiq(RexCall call){
      LogicalExpression arg = call.getOperands().get(0).accept(this);
      MajorType castType = null;

      switch(call.getType().getSqlTypeName().getName()){
      case "VARCHAR":
      case "CHAR":
        castType = Types.required(MinorType.VARCHAR).toBuilder().setWidth(call.getType().getPrecision()).build();
View Full Code Here


  @Override
  public void setup(OutputMutator output) throws ExecutionSetupException {
    try {
      for (int i = 0; i < selectedColumnNames.size(); i++) {
        MajorType type = Types.optional(getMinorTypeFromHiveTypeInfo(selectedColumnTypes.get(i)));
        MaterializedField field = MaterializedField.create(SchemaPath.getSimplePath(selectedColumnNames.get(i)), type);
        Class vvClass = TypeHelper.getValueVectorClass(type.getMinorType(), type.getMode());
        vectors.add(output.addField(field, vvClass));
      }

      for (int i = 0; i < selectedPartitionNames.size(); i++) {
        MajorType type = Types.required(getMinorTypeFromHiveTypeInfo(selectedPartitionTypes.get(i)));
        MaterializedField field = MaterializedField.create(SchemaPath.getSimplePath(selectedPartitionNames.get(i)), type);
        Class vvClass = TypeHelper.getValueVectorClass(field.getType().getMinorType(), field.getDataMode());
        pVectors.add(output.addField(field, vvClass));
      }
    } catch(SchemaChangeException e) {
View Full Code Here

  @Override
  public void setup(OutputMutator output) throws ExecutionSetupException {
    try {
      for (int i = 0; i < selectedColumnNames.size(); i++) {
        MajorType type = Types.optional(getMinorTypeFromHiveTypeInfo(selectedColumnTypes.get(i)));
        MaterializedField field = MaterializedField.create(SchemaPath.getSimplePath(selectedColumnNames.get(i)), type);
        Class vvClass = TypeHelper.getValueVectorClass(type.getMinorType(), type.getMode());
        vectors.add(output.addField(field, vvClass));
      }

      for (int i = 0; i < selectedPartitionNames.size(); i++) {
        MajorType type = Types.required(getMinorTypeFromHiveTypeInfo(selectedPartitionTypes.get(i)));
        MaterializedField field = MaterializedField.create(SchemaPath.getSimplePath(selectedPartitionNames.get(i)), type);
        Class vvClass = TypeHelper.getValueVectorClass(field.getType().getMinorType(), field.getDataMode());
        pVectors.add(output.addField(field, vvClass));
      }
    } catch(SchemaChangeException e) {
View Full Code Here

   * Function to read a value from the field reader, detect the type, construct the appropriate value holder
   * and use the value holder to write to the Map.
   */
  public static void writeToMapFromReader(FieldReader fieldReader, BaseWriter.MapWriter mapWriter, DrillBuf buffer) {

    MajorType valueMajorType = fieldReader.getType();
    MinorType valueMinorType = valueMajorType.getMinorType();

    switch (valueMinorType) {
      case TINYINT:
        TinyIntHolder tinyIntHolder = new TinyIntHolder();
        tinyIntHolder.value = fieldReader.readByte();
        mapWriter.tinyInt(MappifyUtility.fieldValue).write(tinyIntHolder);
        break;
      case SMALLINT:
        SmallIntHolder smallIntHolder = new SmallIntHolder();
        smallIntHolder.value = fieldReader.readShort();
        mapWriter.smallInt(MappifyUtility.fieldValue).write(smallIntHolder);
        break;
      case BIGINT:
        BigIntHolder bh = new BigIntHolder();
        bh.value = fieldReader.readLong();
        mapWriter.bigInt(MappifyUtility.fieldValue).write(bh);
        break;
      case INT:
        IntHolder ih = new IntHolder();
        ih.value = fieldReader.readInteger();
        mapWriter.integer(MappifyUtility.fieldValue).write(ih);
        break;
      case UINT1:
        UInt1Holder uInt1Holder = new UInt1Holder();
        uInt1Holder.value = fieldReader.readByte();
        mapWriter.uInt1(MappifyUtility.fieldValue).write(uInt1Holder);
        break;
      case UINT2:
        UInt2Holder uInt2Holder = new UInt2Holder();
        uInt2Holder.value = fieldReader.readCharacter();
        mapWriter.uInt2(MappifyUtility.fieldValue).write(uInt2Holder);
        break;
      case UINT4:
        UInt4Holder uInt4Holder = new UInt4Holder();
        uInt4Holder.value = fieldReader.readInteger();
        mapWriter.uInt4(MappifyUtility.fieldValue).write(uInt4Holder);
        break;
      case UINT8:
        UInt8Holder uInt8Holder = new UInt8Holder();
        uInt8Holder.value = fieldReader.readInteger();
        mapWriter.uInt8(MappifyUtility.fieldValue).write(uInt8Holder);
        break;
      case DECIMAL9:
        Decimal9Holder decimalHolder = new Decimal9Holder();
        decimalHolder.value = fieldReader.readBigDecimal().intValue();
        decimalHolder.scale = valueMajorType.getScale();
        decimalHolder.precision = valueMajorType.getPrecision();
        mapWriter.decimal9(MappifyUtility.fieldValue).write(decimalHolder);
        break;
      case DECIMAL18:
        Decimal18Holder decimal18Holder = new Decimal18Holder();
        decimal18Holder.value = fieldReader.readBigDecimal().longValue();
        decimal18Holder.scale = valueMajorType.getScale();
        decimal18Holder.precision = valueMajorType.getPrecision();
        mapWriter.decimal18(MappifyUtility.fieldValue).write(decimal18Holder);
        break;
      case DECIMAL28SPARSE:
        Decimal28SparseHolder decimal28Holder = new Decimal28SparseHolder();

        // Ensure that the buffer used to store decimal is of sufficient length
        buffer.reallocIfNeeded(decimal28Holder.WIDTH);
        decimal28Holder.scale = valueMajorType.getScale();
        decimal28Holder.precision = valueMajorType.getPrecision();
        decimal28Holder.buffer = buffer;
        decimal28Holder.start = 0;
        DecimalUtility.getSparseFromBigDecimal(fieldReader.readBigDecimal(), buffer, 0, decimal28Holder.scale,
            decimal28Holder.precision, decimal28Holder.nDecimalDigits);
        mapWriter.decimal28Sparse(MappifyUtility.fieldValue).write(decimal28Holder);
        break;
      case DECIMAL38SPARSE:
        Decimal38SparseHolder decimal38Holder = new Decimal38SparseHolder();

        // Ensure that the buffer used to store decimal is of sufficient length
        buffer.reallocIfNeeded(decimal38Holder.WIDTH);
        decimal38Holder.scale = valueMajorType.getScale();
        decimal38Holder.precision = valueMajorType.getPrecision();
        decimal38Holder.buffer = buffer;
        decimal38Holder.start = 0;
        DecimalUtility.getSparseFromBigDecimal(fieldReader.readBigDecimal(), buffer, 0, decimal38Holder.scale,
            decimal38Holder.precision, decimal38Holder.nDecimalDigits);

View Full Code Here

  public void updateColumnMetaData(String catalogName, String schemaName, String tableName, BatchSchema schema){

    columns = new ColumnMetaData[schema.getFieldCount()];
    for(int i = 0; i < schema.getFieldCount(); i++){
      MaterializedField f = schema.getColumn(i);
      MajorType t = f.getType();
      ColumnMetaData col = new ColumnMetaData( //
          i, // ordinal
          false, // autoIncrement
          true, // caseSensitive
          false, // searchable
          false, // currency
          f.getDataMode() == DataMode.OPTIONAL ? ResultSetMetaData.columnNullable : ResultSetMetaData.columnNoNulls, //nullability
          !Types.isUnSigned(t), // signed
          10, // display size.
          f.getAsSchemaPath().getRootSegment().getPath(), // label
          f.getAsSchemaPath().getRootSegment().getPath(), // columnname
          schemaName, // schemaname
          t.hasPrecision() ? t.getPrecision() : 0, // precision
          t.hasScale() ? t.getScale() : 0, // scale
          null, // tablename is null so sqlline doesn't try to retrieve primary keys.
          catalogName, // catalogname
          getAvaticaType(t)// sql type
          true, // readonly
          false, // writable
View Full Code Here

    return null;
  }

  @Override
  public Void visitCastExpression(CastExpression e, StringBuilder sb) throws RuntimeException {
    MajorType mt = e.getMajorType();

    sb.append("cast( (");
    e.getInput().accept(this, sb);
    sb.append(" ) as ");
    sb.append(mt.getMinorType().name());

    switch(mt.getMinorType()){
    case FLOAT4:
    case FLOAT8:
    case BIT:
    case INT:
    case TINYINT:
    case SMALLINT:
    case BIGINT:
    case UINT1:
    case UINT2:
    case UINT4:
    case UINT8:
    case DATE:
    case TIMESTAMP:
    case TIMESTAMPTZ:
    case TIME:
    case INTERVAL:
    case INTERVALDAY:
    case INTERVALYEAR:
      // do nothing else.
      break;
    case VAR16CHAR:
    case VARBINARY:
    case VARCHAR:
    case FIXED16CHAR:
    case FIXEDBINARY:
    case FIXEDCHAR:

      // add size in parens
      sb.append("(");
      sb.append(mt.getWidth());
      sb.append(")");
      break;
    case DECIMAL9:
    case DECIMAL18:
    case DECIMAL28DENSE:
    case DECIMAL28SPARSE:
    case DECIMAL38DENSE:
    case DECIMAL38SPARSE:

      // add scale and precision
      sb.append("(");
      sb.append(mt.getPrecision());
      sb.append(", ");
      sb.append(mt.getScale());
      sb.append(")");
      break;
    default:
      throw new UnsupportedOperationException(String.format("Unable to convert cast expression %s into string.", e));
    }
View Full Code Here

  @Override
  public MajorType getMajorType() {
    // If the return type of one of the "then" expression or "else" expression is nullable, return "if" expression
    // type as nullable
    MajorType majorType = elseExpression.getMajorType();
    if (majorType.getMode() == DataMode.OPTIONAL) {
      return majorType;
    }

    if (ifCondition.expression.getMajorType().getMode() == DataMode.OPTIONAL) {
      assert ifCondition.expression.getMajorType().getMinorType() == majorType.getMinorType();

      return ifCondition.expression.getMajorType();
    }

    return majorType;
View Full Code Here

 
  @Override
  public Void visitIfExpression(IfExpression ifExpr, ErrorCollector errors) throws RuntimeException {
    // confirm that all conditions are required boolean values.
    IfCondition cond = ifExpr.ifCondition;
    MajorType majorType = cond.condition.getMajorType();
    if ( majorType
        .getMinorType() != MinorType.BIT) {
      errors
          .addGeneralError(
              cond.condition.getPosition(),
              String
                  .format(
                      "Failure composing If Expression.  All conditions must return a boolean type.  Condition was of Type %s.",
                      majorType.getMinorType()));
    }

    // confirm that all outcomes are the same type.
    final MajorType mt = ifExpr.elseExpression.getMajorType();
    cond = ifExpr.ifCondition;
    MajorType innerT = cond.expression.getMajorType();
    if ((innerT.getMode() == DataMode.REPEATED && mt.getMode() != DataMode.REPEATED) || //
        ((innerT.getMinorType() != mt.getMinorType()) &&
        (innerT.getMode() != DataMode.OPTIONAL && mt.getMode() != DataMode.OPTIONAL &&
        (innerT.getMinorType() != MinorType.NULL && mt.getMinorType() != MinorType.NULL)))) {
      errors
          .addGeneralError(
              cond.condition.getPosition(),
              String
                  .format(
View Full Code Here

        return -1;
      }
    }

    for (int i = 0; i < holder.getParamCount(); i++) {
      MajorType argType = call.args.get(i).getMajorType();
      MajorType parmType = holder.getParmMajorType(i);

      //@Param FieldReader will match any type
      if (holder.isFieldReader(i)) {
//        if (Types.isComplex(call.args.get(i).getMajorType()) ||Types.isRepeated(call.args.get(i).getMajorType()) )
          continue;
//        else
//          return -1;
      }

      if (!TypeCastRules.isCastableWithNullHandling(argType, parmType, holder.getNullHandling())) {
        return -1;
      }

      Integer parmVal = ResolverTypePrecedence.precedenceMap.get(parmType
          .getMinorType());
      Integer argVal = ResolverTypePrecedence.precedenceMap.get(argType
          .getMinorType());

      if (parmVal == null) {
        throw new RuntimeException(String.format(
            "Precedence for type %s is not defined", parmType.getMinorType()
                .name()));
      }

      if (argVal == null) {
        throw new RuntimeException(String.format(
            "Precedence for type %s is not defined", argType.getMinorType()
                .name()));
      }

      if (parmVal - argVal < 0) {

        /* Precedence rules does not allow to implicitly cast, however check
         * if the seconday rules allow us to cast
         */
        Set<MinorType> rules;
        if ((rules = (ResolverTypePrecedence.secondaryImplicitCastRules.get(parmType.getMinorType()))) != null &&
            rules.contains(argType.getMinorType()) != false) {
          secondaryCast = true;
        } else {
          return -1;
        }
      }
      // Check null vs non-null, using same logic as that in Types.softEqual()
      // Only when the function uses NULL_IF_NULL, nullable and non-nullable are inter-changable.
      // Otherwise, the function implementation is not a match.
      if (argType.getMode() != parmType.getMode()) {
        // TODO - this does not seem to do what it is intended to
//        if (!((holder.getNullHandling() == NullHandling.NULL_IF_NULL) &&
//            (argType.getMode() == DataMode.OPTIONAL ||
//             argType.getMode() == DataMode.REQUIRED ||
//             parmType.getMode() == DataMode.OPTIONAL ||
//             parmType.getMode() == DataMode.REQUIRED )))
//          return -1;
        // if the function is designed to take optional with custom null handling, and a required
        // is being passed, increase the cost to account for a null check
        // this allows for a non-nullable implementation to be preferred
        if (holder.getNullHandling() == NullHandling.INTERNAL) {
          // a function that expects required output, but nullable was provided
          if (parmType.getMode() == DataMode.REQUIRED && argType.getMode() == DataMode.OPTIONAL) {
            return -1;
          }
          else if (parmType.getMode() == DataMode.OPTIONAL && argType.getMode() == DataMode.REQUIRED) {
            cost+= DATAMODE_CAST_COST;
          }
        }
      }

View Full Code Here

    if (schema != null && newContainer.getSchema().equals(schema)) {
      container.zeroVectors();
      BatchSchema schema = container.getSchema();
      for (int i = 0; i < container.getNumberOfColumns(); i++) {
        MaterializedField field = schema.getColumn(i);
        MajorType type = field.getType();
        ValueVector vOut = container.getValueAccessorById(TypeHelper.getValueVectorClass(type.getMinorType(), type.getMode()),
                container.getValueVectorId(field.getPath()).getFieldIds()).getValueVector();
        ValueVector vIn = newContainer.getValueAccessorById(TypeHelper.getValueVectorClass(type.getMinorType(), type.getMode()),
                newContainer.getValueVectorId(field.getPath()).getFieldIds()).getValueVector();
        TransferPair tp = vIn.makeTransferPair(vOut);
        tp.transfer();
      }
      return false;
View Full Code Here

TOP

Related Classes of org.apache.drill.common.types.TypeProtos.MajorType

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., owned by Oracle Inc. Contact: coftware@gmail.com.