Package org.apache.hadoop.hive.common.type

Examples of org.apache.hadoop.hive.common.type.HiveDecimal
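HiveDecimal is Hive's immutable decimal type (historically a thin wrapper around java.math.BigDecimal, with precision capped at 38 digits). Because it is immutable, operations such as abs(), negate(), setScale() and scaleByPowerOfTen() return new instances. The excerpts below show it being serialized in several formats, wrapped as a Hadoop Writable, routed through ObjectInspectors, and stored in vectorized column batches.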


        return fixed;
      } else {
        throw new AvroSerdeException("Unexpected Avro schema for Binary TypeInfo: " + schema.getType());
      }
    case DECIMAL:
      HiveDecimal dec = (HiveDecimal) fieldOI.getPrimitiveJavaObject(structFieldData);
      // Avro decimals carry the unscaled two's-complement bytes at the type's declared scale.
      return AvroSerdeUtils.getBufferFromDecimal(dec, ((DecimalTypeInfo) typeInfo).scale());
    case CHAR:
      HiveChar ch = (HiveChar)fieldOI.getPrimitiveJavaObject(structFieldData);
      return ch.getStrippedValue();
    case VARCHAR:
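AvroSerdeUtils.getBufferFromDecimal, as used above, should rescale the value to the type's declared scale and wrap the unscaled two's-complement bytes in a ByteBuffer. A minimal standalone sketch (class name is mine; assumes the hive-common and hive-serde jars are on the classpath):

import java.nio.ByteBuffer;

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils;

public class AvroDecimalSketch {
  public static void main(String[] args) {
    // 123.45 at scale 2 has the unscaled value 12345 (0x3039).
    HiveDecimal dec = HiveDecimal.create("123.45");
    ByteBuffer buf = AvroSerdeUtils.getBufferFromDecimal(dec, 2);
    System.out.println(buf.remaining() + " bytes"); // 2 bytes
  }
}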


        // read the null byte again
        buffer.read(positive ? invert : !invert);

        String digits = new String(decimalBuffer, 0, length, decimalCharSet);
        BigInteger bi = new BigInteger(digits);
        HiveDecimal bd = HiveDecimal.create(bi).scaleByPowerOfTen(factor - length);

        if (!positive) {
          bd = bd.negate();
        }

        bdw.set(bd);
        return bdw;
      }
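The decode above rebuilds the value as unscaled digits times 10^(factor - length), negating afterwards if the sign byte said so. A self-contained sketch of just that arithmetic (class name is mine):

import java.math.BigInteger;

import org.apache.hadoop.hive.common.type.HiveDecimal;

public class DecodeDecimalSketch {
  public static void main(String[] args) {
    // "123" with factor -2 should decode to 0.00123 (0.123 * 10^-2).
    String digits = "123";
    int factor = -2;
    int length = digits.length();

    BigInteger bi = new BigInteger(digits);
    // 123 * 10^(-2 - 3) = 123 * 10^-5 = 0.00123
    HiveDecimal bd = HiveDecimal.create(bi).scaleByPowerOfTen(factor - length);
    System.out.println(bd); // 0.00123
  }
}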

        // Sign is 3 (bigger than 0)
        // Factor is -2 (move decimal point 2 positions right)
        // Digits are: 123

        HiveDecimalObjectInspector boi = (HiveDecimalObjectInspector) poi;
        HiveDecimal dec = boi.getPrimitiveJavaObject(o);

        // get the sign of the big decimal
        int sign = dec.compareTo(HiveDecimal.ZERO);

        // we'll encode the absolute value (sign is separate)
        dec = dec.abs();

        // get the scale factor to turn big decimal into a decimal < 1
        int factor = dec.precision() - dec.scale();
        factor = sign == 1 ? factor : -factor;

        // convert the absolute big decimal to digits; HiveDecimal is
        // immutable, so the rescaled result must be reassigned
        dec = dec.scaleByPowerOfTen(Math.abs(dec.scale()));
        String digits = dec.unscaledValue().toString();

        // finally write out the pieces (sign, scale, digits)
        writeByte(buffer, (byte) ( sign + 1), invert);
        writeByte(buffer, (byte) ((factor >> 24) ^ 0x80), invert);
        writeByte(buffer, (byte) ( factor >> 16), invert);
        writeByte(buffer, (byte) ( factor >> 8), invert);
        writeByte(buffer, (byte) ( factor), invert);
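Working the comment's example forward: for 0.00123 the comparison with ZERO gives the sign, precision() - scale() = 3 - 5 = -2 gives the factor, and the unscaled value supplies the digits. A runnable sketch of that decomposition (class name is mine):

import org.apache.hadoop.hive.common.type.HiveDecimal;

public class SortableDecimalSketch {
  public static void main(String[] args) {
    HiveDecimal dec = HiveDecimal.create("0.00123");

    // Sign of the value: -1, 0 or 1 (the serializer writes sign + 1).
    int sign = dec.compareTo(HiveDecimal.ZERO);
    dec = dec.abs();

    // Factor positions the decimal point: 0.00123 = 0.123 * 10^-2.
    int factor = dec.precision() - dec.scale();
    factor = sign == 1 ? factor : -factor;

    // Digits come straight from the unscaled value.
    String digits = dec.unscaledValue().toString();

    System.out.println(sign + " / " + factor + " / " + digits); // 1 / -2 / 123
  }
}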

    LazyUtils.writePrimitive(baos, doubleValue, doubleOI);
    m.put(cfBytes, "double".getBytes(), baos.toByteArray());

    // decimal
    baos.reset();
    HiveDecimal decimalValue = HiveDecimal.create(65536L);
    HiveDecimalWritable decimalWritable = new HiveDecimalWritable(decimalValue);
    decimalWritable.write(out);
    m.put(cfBytes, "decimal".getBytes(), baos.toByteArray());

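HiveDecimalWritable is the Hadoop Writable wrapper used above (the test presumably writes through a DataOutputStream over the same buffer it later stores). A minimal round trip (class name is mine):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

public class WritableRoundTripSketch {
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(baos);

    // Serialize through the Writable wrapper, as the test above does.
    new HiveDecimalWritable(HiveDecimal.create(65536L)).write(out);

    // Read it back to confirm the round trip.
    HiveDecimalWritable read = new HiveDecimalWritable();
    read.readFields(new DataInputStream(new ByteArrayInputStream(baos.toByteArray())));
    System.out.println(read.getHiveDecimal()); // 65536
  }
}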

            .getPrimitiveTypeInfo(serdeConstants.DOUBLE_TYPE_NAME));
    LazyUtils.writePrimitiveUTF8(baos, doubleValue, doubleOI, false, (byte) 0, null);
    m.put(cfBytes, "double".getBytes(), baos.toByteArray());

    // decimal
    HiveDecimal decimalValue = HiveDecimal.create("1.23");
    baos.reset();
    JavaHiveDecimalObjectInspector decimalOI = (JavaHiveDecimalObjectInspector) PrimitiveObjectInspectorFactory
        .getPrimitiveJavaObjectInspector(new DecimalTypeInfo(5, 2));
    LazyUtils.writePrimitiveUTF8(baos, decimalOI.create(decimalValue), decimalOI, false, (byte) 0,
        null);
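The same pattern works through an ObjectInspector tied to a declared decimal(5,2) type. A minimal sketch (class name is mine; JavaHiveDecimalObjectInspector.create should enforce the declared precision and scale):

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaHiveDecimalObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;

public class DecimalOISketch {
  public static void main(String[] args) {
    JavaHiveDecimalObjectInspector oi =
        (JavaHiveDecimalObjectInspector) PrimitiveObjectInspectorFactory
            .getPrimitiveJavaObjectInspector(new DecimalTypeInfo(5, 2));

    // create(...) wraps the value; getPrimitiveJavaObject(...) unwraps it.
    Object o = oi.create(HiveDecimal.create("1.23"));
    System.out.println(oi.getPrimitiveJavaObject(o)); // 1.23
  }
}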

        return new BytesWritable(v.getBytes("UTF-8"));
      } catch (UnsupportedEncodingException e) {
        throw new SerDeException("Failed to encode string in UTF-8", e);
      }
    case DECIMAL:
      HiveDecimal hd = (HiveDecimal)inspector.getPrimitiveJavaObject(obj);
      DecimalTypeInfo decTypeInfo = (DecimalTypeInfo) inspector.getTypeInfo();
      int prec = decTypeInfo.precision();
      int scale = decTypeInfo.scale();
      byte[] src = hd.setScale(scale).unscaledValue().toByteArray();
      // Estimated number of bytes needed.
      int bytes = PRECISION_TO_BYTE_COUNT[prec - 1];
      if (bytes == src.length) {
        // No padding needed.
        return new BytesWritable(src);
      }
      byte[] tgt = new byte[bytes];
      if (hd.signum() == -1) {
        // For a negative number, initialize all bits to 1 (sign extension).
        for (int i = 0; i < bytes; i++) {
          tgt[i] |= 0xFF;
        }
      }
      // Right-align the two's-complement bytes; the pre-filled prefix
      // provides the sign extension.
      System.arraycopy(src, 0, tgt, bytes - src.length, src.length);
      return new BytesWritable(tgt);
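A dependency-free sketch of what that padding achieves: the payload bytes are right-aligned in a fixed-width buffer, and the 0xFF (or 0x00) prefix sign-extends the value so the wide form still decodes to the same number (all names are mine):

import java.math.BigInteger;
import java.util.Arrays;

public class SignExtendSketch {
  public static void main(String[] args) {
    byte[] src = BigInteger.valueOf(-5).toByteArray(); // one byte: 0xFB
    int bytes = 4; // fixed target width

    byte[] tgt = new byte[bytes];
    if (src[0] < 0) {
      // Negative value: pre-fill with 1-bits so the prefix sign-extends.
      Arrays.fill(tgt, (byte) 0xFF);
    }
    // Right-align the payload bytes in the fixed-width buffer.
    System.arraycopy(src, 0, tgt, bytes - src.length, src.length);

    System.out.println(new BigInteger(tgt)); // -5 (0xFFFFFFFB)
  }
}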

      statsObj.getStatsData().getDecimalStats().setNumNulls(v);
    } else if (fName.equals("numdistinctvalues")) {
      long v = ((LongObjectInspector) oi).get(o);
      statsObj.getStatsData().getDecimalStats().setNumDVs(v);
    } else if (fName.equals("max")) {
      HiveDecimal d = ((HiveDecimalObjectInspector) oi).getPrimitiveJavaObject(o);
      statsObj.getStatsData().getDecimalStats().setHighValue(convertToThriftDecimal(d));
    } else if (fName.equals("min")) {
      HiveDecimal d = ((HiveDecimalObjectInspector) oi).getPrimitiveJavaObject(o);
      statsObj.getStatsData().getDecimalStats().setLowValue(convertToThriftDecimal(d));
    }
  }
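The high/low values in those decimal stats are just compareTo orderings over HiveDecimal. A tiny self-contained min/max tracker in the same spirit (all names are mine):

import org.apache.hadoop.hive.common.type.HiveDecimal;

public class DecimalMinMaxSketch {
  public static void main(String[] args) {
    HiveDecimal low = null;
    HiveDecimal high = null;
    for (String s : new String[] { "3.14", "-2.5", "10" }) {
      HiveDecimal d = HiveDecimal.create(s);
      if (low == null || d.compareTo(low) < 0) {
        low = d;
      }
      if (high == null || d.compareTo(high) > 0) {
        high = d;
      }
    }
    System.out.println(low + " .. " + high); // -2.5 .. 10
  }
}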

    @Override
    void write(Object obj) throws IOException {
      super.write(obj);
      if (obj != null) {
        HiveDecimal decimal = ((HiveDecimalObjectInspector) inspector).
            getPrimitiveJavaObject(obj);
        if (decimal == null) {
          return;
        }
        // ORC stores each value as an unscaled BigInteger plus its scale.
        SerializationUtils.writeBigInteger(valueStream,
            decimal.unscaledValue());
        scaleStream.write(decimal.scale());
        indexStatistics.updateDecimal(decimal);
      }
    }
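The writer splits each value into an unscaled BigInteger and a scale, which is lossless: HiveDecimal.create(BigInteger, int) reassembles exactly that pair. A minimal sketch (class name is mine):

import java.math.BigInteger;

import org.apache.hadoop.hive.common.type.HiveDecimal;

public class UnscaledPairSketch {
  public static void main(String[] args) {
    HiveDecimal decimal = HiveDecimal.create("1.23");

    BigInteger unscaled = decimal.unscaledValue(); // 123
    int scale = decimal.scale();                   // 2

    // The (unscaled, scale) pair reconstructs the original value.
    System.out.println(HiveDecimal.create(unscaled, scale)); // 1.23
  }
}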

    // TODO Auto-generated method stub
  }

  @Override
  public void setElement(int outElementNum, int inputElementNum, ColumnVector inputVector) {
    HiveDecimal hiveDec = ((DecimalColumnVector) inputVector).vector[inputElementNum]
        .getHiveDecimal(precision, scale);
    if (hiveDec == null) {
      // null means the value did not fit this column's precision/scale
      noNulls = false;
      isNull[outElementNum] = true;
    } else {
      vector[outElementNum].set(hiveDec);
    }
  }

  public void set(int elementNum, HiveDecimalWritable writable) {
    HiveDecimal hiveDec = writable.getHiveDecimal(precision, scale);
    if (hiveDec == null) {
      noNulls = false;
      isNull[elementNum] = true;
    } else {
      vector[elementNum].set(hiveDec);
    }
  }
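getHiveDecimal(precision, scale) returns null when a value cannot be represented at the column's declared type, and both methods above then flag the slot as null. A hedged usage sketch (class name is mine; assumes this DecimalColumnVector constructor and that 123456.7 overflows decimal(5,2)):

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

public class DecimalVectorSketch {
  public static void main(String[] args) {
    // A batch of 1024 rows typed as decimal(5,2).
    DecimalColumnVector col = new DecimalColumnVector(1024, 5, 2);

    col.set(0, new HiveDecimalWritable(HiveDecimal.create("1.23")));
    // 123456.7 cannot fit decimal(5,2), so the slot should be marked null.
    col.set(1, new HiveDecimalWritable(HiveDecimal.create("123456.7")));

    System.out.println(col.vector[0].getHiveDecimal()); // 1.23
    System.out.println(col.isNull[1]);                  // true
  }
}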
