Package org.apache.hadoop.hive.serde2.io

Examples of org.apache.hadoop.hive.serde2.io.TimestampWritable


// LazyBinaryTimestamp keeps its lazily-deserialized value in a TimestampWritable.
public class LazyBinaryTimestamp extends
    LazyBinaryPrimitive<WritableTimestampObjectInspector, TimestampWritable> {
  static final Log LOG = LogFactory.getLog(LazyBinaryTimestamp.class);

  LazyBinaryTimestamp(WritableTimestampObjectInspector oi) {
    super(oi);
    data = new TimestampWritable();
  }

  LazyBinaryTimestamp(LazyBinaryTimestamp copy) {
    super(copy);
    data = new TimestampWritable(copy.data);
  }
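For context, here is a minimal, self-contained sketch of creating a TimestampWritable directly and exercising the same copy constructor used above (every constructor and accessor shown appears elsewhere on this page):

import java.sql.Timestamp;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;

public class TimestampWritableDemo {
  public static void main(String[] args) {
    // Wrap a java.sql.Timestamp in a Writable, then read it back.
    Timestamp ts = Timestamp.valueOf("2009-07-20 04:17:52");
    TimestampWritable tw = new TimestampWritable(ts);

    // Same copy constructor LazyBinaryTimestamp uses for its backing data.
    TimestampWritable copy = new TimestampWritable(tw);
    System.out.println(copy.getTimestamp()); // 2009-07-20 04:17:52.0
  }
}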

        d.set(deserializeInt(buffer, invert));
        return d;
      }

      case TIMESTAMP:
        // Reuse the caller-supplied Writable when possible to avoid an allocation.
        TimestampWritable t = (reuse == null ? new TimestampWritable() :
            (TimestampWritable) reuse);
        byte[] bytes = new byte[TimestampWritable.BINARY_SORTABLE_LENGTH];

        // Read the fixed-length binary-sortable form byte by byte; 'invert'
        // undoes the bit-flip applied for descending sort order.
        for (int i = 0; i < bytes.length; i++) {
          bytes[i] = buffer.read(invert);
        }
        t.setBinarySortable(bytes, 0);
        return t;

      case DECIMAL: {
        // See serialization of decimal for explanation (below)

        serializeInt(buffer, v, invert);
        return;
      }
      case TIMESTAMP: {
        TimestampObjectInspector toi = (TimestampObjectInspector) poi;
        TimestampWritable t = toi.getPrimitiveWritableObject(o);
        byte[] data = t.getBinarySortable();
        // Write each byte, letting 'invert' flip it for descending sort order.
        for (int i = 0; i < data.length; i++) {
          buffer.write(data[i], invert);
        }
        return;
      }
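The two fragments above are a matched pair: the serializer writes TimestampWritable's fixed-length binary-sortable form, and the deserializer reads it back. A minimal sketch of the same round trip using only the public API seen in those fragments:

import java.sql.Timestamp;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;

public class BinarySortableRoundTrip {
  public static void main(String[] args) {
    Timestamp ts = Timestamp.valueOf("2001-02-03 01:02:03.123456789");
    TimestampWritable in = new TimestampWritable(ts);

    // Fixed-length byte form whose unsigned ordering matches timestamp order.
    byte[] sortable = in.getBinarySortable();

    TimestampWritable out = new TimestampWritable();
    out.setBinarySortable(sortable, 0);
    System.out.println(out.getTimestamp()); // 2001-02-03 01:02:03.123456789
  }
}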

              case TIMESTAMP:
                LongColumnVector tcv = (LongColumnVector) batch.cols[k];
                long timeInNanoSec = tcv.vector[rowIndex];
                // Rebuild a Timestamp from the epoch nanoseconds in the long vector.
                Timestamp t = new Timestamp(0);
                TimestampUtils.assignTimeInNanoSec(timeInNanoSec, t);
                TimestampWritable tw = new TimestampWritable();
                tw.set(t);
                LazyTimestamp.writeUTF8(serializeVectorStream, tw);
                break;
              case DATE:
                LongColumnVector dacv = (LongColumnVector) batch.cols[k];
                DateWritable daw = new DateWritable((int) dacv.vector[rowIndex]);
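The vectorized path stores each timestamp as a single long of epoch nanoseconds and uses TimestampUtils.assignTimeInNanoSec to rebuild the Timestamp. A standalone sketch of that direction of the conversion using plain java.sql.Timestamp arithmetic, not the Hive helper itself (which may differ in detail); floor arithmetic keeps pre-epoch values correct:

import java.sql.Timestamp;

public class NanosToTimestamp {
  // Plausible equivalent of TimestampUtils.assignTimeInNanoSec (an assumption,
  // not the helper's actual source).
  static Timestamp fromNanos(long nanos) {
    Timestamp t = new Timestamp(Math.floorDiv(nanos, 1_000_000L)); // epoch millis
    t.setNanos((int) Math.floorMod(nanos, 1_000_000_000L));        // nanos within the second
    return t;
  }

  public static void main(String[] args) {
    Timestamp t = fromNanos(1_234_567_890L);
    System.out.println(t.getTime());  // 1234 (epoch millis)
    System.out.println(t.getNanos()); // 234567890
  }
}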

          public void assignObjectValue(Object val, int destIndex) throws HiveException {
            if (val == null) {
              assignNull(destIndex);
            }
            else {
              TimestampWritable bw = (TimestampWritable) val;
              Timestamp t = bw.getTimestamp();
              // Long vectors carry timestamps as epoch nanoseconds.
              assignLong(TimestampUtils.getTimeNanoSec(t), destIndex);
            }
          }
        }.init(outputBatch, (LongColumnVector) destCol);
        break;
      case DATE:
        outVCA = new VectorLongColumnAssign() {
          @Override
          public void assignObjectValue(Object val, int destIndex) throws HiveException {
            if (val == null) {
              assignNull(destIndex);
            }
            else {
              DateWritable bw = (DateWritable) val;
              assignLong(bw.getDays(), destIndex);
            }
          }
        }.init(outputBatch, (LongColumnVector) destCol);
        break;
      default:
        throw new HiveException("Incompatible Long vector column and primitive category " +
            poi.getPrimitiveCategory());
      }
    }
    else if (destCol instanceof DoubleColumnVector) {
      switch(poi.getPrimitiveCategory()) {
      case DOUBLE:
        outVCA = new VectorDoubleColumnAssign() {
          @Override
          public void assignObjectValue(Object val, int destIndex) throws HiveException {
            if (val == null) {
              assignNull(destIndex);
            }
            else {
              DoubleWritable bw = (DoubleWritable) val;
              assignDouble(bw.get(), destIndex);
            }
          }
        }.init(outputBatch, (DoubleColumnVector) destCol);
        break;
      case FLOAT:
        outVCA = new VectorDoubleColumnAssign() {
          @Override
          public void assignObjectValue(Object val, int destIndex) throws HiveException {
            if (val == null) {
              assignNull(destIndex);
            }
            else {
              FloatWritable bw = (FloatWritable) val;
              assignDouble(bw.get(), destIndex);
            }
          }
        }.init(outputBatch, (DoubleColumnVector) destCol);
        break;
      default:
        throw new HiveException("Incompatible Double vector column and primitive category " +
            poi.getPrimitiveCategory());
      }
    }
    else if (destCol instanceof BytesColumnVector) {
      switch(poi.getPrimitiveCategory()) {
      case STRING:
        outVCA = new VectorBytesColumnAssign() {
          @Override
          public void assignObjectValue(Object val, int destIndex) throws HiveException {
            if (val == null) {
              assignNull(destIndex);
            }
            else {
              Text bw = (Text) val;
              byte[] bytes = bw.getBytes();
              assignBytes(bytes, 0, bytes.length, destIndex);
            }
          }
        }.init(outputBatch, (BytesColumnVector) destCol);
        break;
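The TIMESTAMP branch above flattens a Timestamp into one long with TimestampUtils.getTimeNanoSec. A hedged sketch of that forward conversion: getTime() already includes the millisecond part of getNanos(), so only the sub-millisecond remainder needs to be added back:

import java.sql.Timestamp;

public class TimestampToNanos {
  // Plausible equivalent of TimestampUtils.getTimeNanoSec (an assumption,
  // not the helper's actual source).
  static long toNanos(Timestamp t) {
    return t.getTime() * 1_000_000L + t.getNanos() % 1_000_000;
  }

  public static void main(String[] args) {
    Timestamp t = new Timestamp(1234); // 1.234 s after the epoch
    t.setNanos(234_567_890);           // refine to 1.23456789 s
    System.out.println(toNanos(t));    // 1234567890
  }
}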

    ObjectInspector[] arguments = {valueOI};
    udf.initialize(arguments);

    Timestamp ts = Timestamp.valueOf("1970-01-01 00:00:00");
    runAndVerify(udf,
        new TimestampWritable(ts),
        new LongWritable(ts.getTime() / 1000));

    ts = Timestamp.valueOf("2001-02-03 01:02:03");
    runAndVerify(udf,
        new TimestampWritable(ts),
        new LongWritable(ts.getTime() / 1000));

    // test null values
    runAndVerify(udf, null, null);
  }
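runAndVerify is a local test helper that the excerpt does not show. A minimal sketch of what such a helper typically looks like, following the DeferredJavaObject pattern used in the later tests on this page (the name and exact signature are assumptions):

// Hypothetical helper; evaluate() and DeferredJavaObject are the real GenericUDF APIs.
private void runAndVerify(GenericUDF udf, TimestampWritable arg, LongWritable expected)
    throws HiveException {
  GenericUDF.DeferredObject[] args = {new GenericUDF.DeferredJavaObject(arg)};
  LongWritable output = (LongWritable) udf.evaluate(args);
  if (expected == null) {
    assertNull(output);
  } else {
    assertEquals(expected.get(), output.get());
  }
}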

    }

    @Override
    Object next(Object previous) throws IOException {
      super.next(previous);
      TimestampWritable result = null;
      if (valuePresent) {
        if (previous == null) {
          result = new TimestampWritable();
        } else {
          result = (TimestampWritable) previous;
        }
        Timestamp ts = new Timestamp(0);
        long millis = (data.next() + WriterImpl.BASE_TIMESTAMP) *
            WriterImpl.MILLIS_PER_SECOND;
        int newNanos = parseNanos(nanos.next());
        // The seconds stream dropped sub-second detail when the writer divided by
        // 1000, so add back the whole milliseconds carried in the nanos value.
        if (millis >= 0) {
          millis += newNanos / 1000000;
        } else {
          millis -= newNanos / 1000000;
        }
        ts.setTime(millis);
        ts.setNanos(newNanos);
        result.set(ts);
      }
      return result;
    }
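A concrete trace of the reassembly above, stripped of the ORC streams (the seconds and nanos values are stand-ins for data.next() + WriterImpl.BASE_TIMESTAMP and parseNanos(nanos.next())):

import java.sql.Timestamp;

public class OrcStyleDecode {
  public static void main(String[] args) {
    long seconds = 5;            // stand-in for the decoded seconds stream
    int newNanos = 123_456_789;  // stand-in for the decoded nanos stream

    long millis = seconds * 1000L;
    // Mirror the reader's rounding fix: add back the whole milliseconds
    // that are carried inside the nanos value.
    if (millis >= 0) {
      millis += newNanos / 1_000_000;
    } else {
      millis -= newNanos / 1_000_000;
    }
    Timestamp ts = new Timestamp(0);
    ts.setTime(millis);    // 5123 ms
    ts.setNanos(newNanos); // then refine to 5.123456789 s
    System.out.println(ts.getTime()); // 5123
  }
}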

    ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
    ObjectInspector valueOI2 = PrimitiveObjectInspectorFactory.javaIntObjectInspector;
    ObjectInspector[] arguments = {valueOI1, valueOI2};

    udf.initialize(arguments);
    // Deprecated Timestamp(int, ...) ctor: year is offset from 1900 and the month
    // is 0-based, so (109, 6, 20, ...) means 2009-07-20 04:17:52.
    DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 6,
        20, 4, 17, 52, 0)));
    DeferredObject valueObj2 = new DeferredJavaObject(Integer.valueOf(3));
    DeferredObject[] args = {valueObj1, valueObj2};
    Text output = (Text) udf.evaluate(args);
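The deprecated Timestamp(int, ...) constructor is easy to misread; Timestamp.valueOf builds the same instant from an unambiguous string and is the usual replacement:

import java.sql.Timestamp;

public class ValueOfInsteadOfDeprecatedCtor {
  public static void main(String[] args) {
    // Same instant as new Timestamp(109, 6, 20, 4, 17, 52, 0).
    Timestamp ts = Timestamp.valueOf("2009-07-20 04:17:52");
    System.out.println(ts); // 2009-07-20 04:17:52.0
  }
}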

    ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
    ObjectInspector valueOI2 = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
    ObjectInspector[] arguments = {valueOI1, valueOI2};

    udf.initialize(arguments);
    // 2009-07-20 and 2009-07-17 via the deprecated 1900-offset, 0-based-month ctor.
    DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 6,
        20, 0, 0, 0, 0)));
    DeferredObject valueObj2 = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 6,
        17, 0, 0, 0, 0)));
    DeferredObject[] args = {valueObj1, valueObj2};
    IntWritable output = (IntWritable) udf.evaluate(args);

    assertEquals("datediff() test for TIMESTAMP failed ", "3", output.toString());
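For reference, the 3-day difference the assertion expects can be reproduced by hand. Since both timestamps sit at midnight and no daylight-saving shift falls between them, plain millisecond arithmetic gives the same answer as the datediff UDF:

import java.sql.Timestamp;

public class DateDiffByHand {
  public static void main(String[] args) {
    Timestamp a = Timestamp.valueOf("2009-07-20 00:00:00");
    Timestamp b = Timestamp.valueOf("2009-07-17 00:00:00");
    long days = (a.getTime() - b.getTime()) / 86_400_000L; // millis per day
    System.out.println(days); // 3
  }
}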
