Package: org.apache.hadoop.hive.serde2.io

Examples of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable


    public VectorUDAFAvgDecimal() {
      super();
      partialResult = new Object[2];
      resultCount = new LongWritable();
      resultSum = new HiveDecimalWritable();
      partialResult[0] = resultCount;
      partialResult[1] = resultSum;

    }
View Full Code Here


        int[] selected) {

      for (int j=0; j< batchSize; ++j) {
        int i = selected[j];
        if (!isNull[i]) {
          HiveDecimalWritable value = vector[i];
          myagg.sumValueWithNullCheck(value, this.sumScale);
        }
      }
    }
View Full Code Here

        myagg.sum.set(HiveDecimal.ZERO);
        myagg.count = 0;
      }

      for (int i=0; i< batchSize; ++i) {
        HiveDecimalWritable value = vector[selected[i]];
        myagg.sumValueNoNullCheck(value, this.sumScale);
      }
    }
View Full Code Here

        int batchSize,
        boolean[] isNull) {

      for(int i=0;i<batchSize;++i) {
        if (!isNull[i]) {
          HiveDecimalWritable value = vector[i];
          myagg.sumValueWithNullCheck(value, this.sumScale);
        }
      }
    }
View Full Code Here

        myagg.sum.set(HiveDecimal.ZERO);
        myagg.count = 0;
      }

      for (int i=0;i<batchSize;++i) {
        HiveDecimalWritable value = vector[i];
        myagg.sumValueNoNullCheck(value, this.sumScale);
      }
    }
View Full Code Here

      this.inputExpression = inputExpression;
    }

    public VectorUDAFSumDecimal() {
      super();
      scratchDecimal = new HiveDecimalWritable();
    }
View Full Code Here

      } else if (arr[0] instanceof Double) {
        assertEquals (key, (Double) expected, (Double) arr[0]);
      } else if (arr[0] instanceof Long) {
        assertEquals (key, (Long) expected, (Long) arr[0]);
      } else if (arr[0] instanceof HiveDecimalWritable) {
        HiveDecimalWritable hdw = (HiveDecimalWritable) arr[0];
        HiveDecimal hd = hdw.getHiveDecimal();
        HiveDecimal expectedDec = (HiveDecimal)expected;
        assertEquals (key, expectedDec, hd);
      } else if (arr[0] instanceof HiveDecimal) {
          HiveDecimal hd = (HiveDecimal) arr[0];
          HiveDecimal expectedDec = (HiveDecimal)expected;
View Full Code Here

        if (vals[1] instanceof DoubleWritable) {
          DoubleWritable dw = (DoubleWritable) vals[1];
          assertEquals (key, (Double) expected, (Double) (dw.get() / lw.get()));
        } else if (vals[1] instanceof HiveDecimalWritable) {
          HiveDecimalWritable hdw = (HiveDecimalWritable) vals[1];
          assertEquals (key, (HiveDecimal) expected, hdw.getHiveDecimal().divide(HiveDecimal.create(lw.get())));
        }
      }
    }
View Full Code Here

        new DecimalColumnVector(size, typeInfo.precision(), typeInfo.scale());

    dcv.noNulls = !nulls;
    dcv.isRepeating = repeating;

    HiveDecimalWritable repeatingValue = new HiveDecimalWritable();
    do{
      repeatingValue.set(HiveDecimal.create(((Double) rand.nextDouble()).toString()).setScale((short)typeInfo.scale()));
    }while(repeatingValue.getHiveDecimal().doubleValue() == 0);

    int nullFrequency = generateNullFrequency(rand);

    for(int i = 0; i < size; i++) {
      if(nulls && (repeating || i % nullFrequency == 0)) {
View Full Code Here

          keyValue = fwKey.get();
        } else if (key instanceof BooleanWritable) {
          BooleanWritable bwKey = (BooleanWritable)key;
          keyValue = bwKey.get();
        } else if (key instanceof HiveDecimalWritable) {
            HiveDecimalWritable hdwKey = (HiveDecimalWritable)key;
            keyValue = hdwKey.getHiveDecimal();
        } else {
          Assert.fail(String.format("Not implemented key output type %s: %s",
              key.getClass().getName(), key));
        }
View Full Code Here

TOP

Related Classes of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable

Copyright © 2018 www.massapicom. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.