Examples of SerDeException
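org.apache.hadoop.hive.serde2.SerDeException is the checked exception that Hive SerDe (serializer/deserializer) implementations throw when initialization fails or a row cannot be serialized or deserialized. The snippets below show where real SerDes raise it directly and where they use it to wrap lower-level exceptions such as IOException.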


Examples of org.apache.hadoop.hive.serde2.SerDeException
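A struct serializer that raises SerDeException in three situations: the ObjectInspector is not a struct, the row carries more fields than the declared schema, or the underlying stream throws an IOException.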

  /**
   * @see SerDe#serialize(Object, ObjectInspector)
   */
  public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {

    if (objInspector.getCategory() != Category.STRUCT) {
      throw new SerDeException(getClass().toString()
          + " can only serialize struct types, but we got: "
          + objInspector.getTypeName());
    }

    // Prepare the field ObjectInspectors
    StructObjectInspector soi = (StructObjectInspector) objInspector;
    List<? extends StructField> fields = soi.getAllStructFieldRefs();
    List<Object> list = soi.getStructFieldsDataAsList(obj);
    List<? extends StructField> declaredFields =
        (serdeParams.getRowTypeInfo() != null
            && ((StructTypeInfo) serdeParams.getRowTypeInfo())
                .getAllStructFieldNames().size() > 0)
        ? ((StructObjectInspector) getObjectInspector()).getAllStructFieldRefs()
        : null;

    try {
      // Reset the stream so its buffer is reused, avoiding an extra byte copy.
      serializeStream.reset();
      serializedSize = 0;
      int count = 0;
      // Serialize each field
      for (int i = 0; i < fields.size(); i++) {
        // Get the field objectInspector and the field object.
        ObjectInspector foi = fields.get(i).getFieldObjectInspector();
        Object f = (list == null ? null : list.get(i));

        if (declaredFields != null && i >= declaredFields.size()) {
          throw new SerDeException("Error: expecting " + declaredFields.size()
              + " but asking for field " + i + "\n" + "data=" + obj + "\n"
              + "tableType=" + serdeParams.getRowTypeInfo().toString() + "\n"
              + "dataType="
              + TypeInfoUtils.getTypeInfoFromObjectInspector(objInspector));
        }

        // If the field that is passed in is NOT a primitive, and either the
        // field is not declared (no schema was given at initialization), or
        // the field is declared as a primitive in initialization, serialize
        // the data to JSON string. Otherwise serialize the data in the
        // delimited way.
        if (!foi.getCategory().equals(Category.PRIMITIVE)
            && (declaredFields == null || declaredFields.get(i)
                .getFieldObjectInspector().getCategory().equals(
                    Category.PRIMITIVE))) {
          LazySimpleSerDe.serialize(serializeStream,
              SerDeUtils.getJSONString(f, foi),
              PrimitiveObjectInspectorFactory.javaStringObjectInspector,
              serdeParams.getSeparators(), 1, serdeParams.getNullSequence(),
              serdeParams.isEscaped(), serdeParams.getEscapeChar(),
              serdeParams.getNeedsEscape());
        } else {
          LazySimpleSerDe.serialize(serializeStream, f, foi,
              serdeParams.getSeparators(), 1, serdeParams.getNullSequence(),
              serdeParams.isEscaped(), serdeParams.getEscapeChar(),
              serdeParams.getNeedsEscape());
        }

        field[i].set(serializeStream.getData(), count,
            serializeStream.getCount() - count);
        count = serializeStream.getCount();
      }
      serializedSize = serializeStream.getCount();
      lastOperationSerialize = true;
      lastOperationDeserialize = false;
    } catch (IOException e) {
      throw new SerDeException(e);
    }
    return serializeCache;
  }

Examples of org.apache.hadoop.hive.serde2.SerDeException
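An initialization check: the column-name list and the column-type list must be the same length, otherwise the SerDe refuses to start.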

    serdeParams.columnTypes = TypeInfoUtils
        .getTypeInfosFromTypeString(columnTypeProperty);

    if (serdeParams.columnNames.size() != serdeParams.columnTypes.size()) {
      throw new SerDeException(serdeName + ": columns has "
          + serdeParams.columnNames.size()
          + " elements while columns.types has "
          + serdeParams.columnTypes.size() + " elements!");
    }

Examples of org.apache.hadoop.hive.serde2.SerDeException
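A deserialization fragment that only accepts BytesWritable or Text input and rejects every other Writable with a SerDeException.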

    } else if (field instanceof Text) {
      Text t = (Text) field;
      byteArrayRef.setData(t.getBytes());
      cachedLazyStruct.init(byteArrayRef, 0, t.getLength());
    } else {
      throw new SerDeException(getClass().toString()
          + ": expects either BytesWritable or Text object!");
    }
    lastOperationSerialize = false;
    lastOperationDeserialize = true;
    return cachedLazyStruct;

Examples of org.apache.hadoop.hive.serde2.SerDeException
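Another struct-only serialize() implementation, this time writing separator-delimited output; it performs the same category and declared-field checks as the first example.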

  public Writable serialize(Object obj, ObjectInspector objInspector)
      throws SerDeException {

    if (objInspector.getCategory() != Category.STRUCT) {
      throw new SerDeException(getClass().toString()
          + " can only serialize struct types, but we got: "
          + objInspector.getTypeName());
    }

    // Prepare the field ObjectInspectors
    StructObjectInspector soi = (StructObjectInspector) objInspector;
    List<? extends StructField> fields = soi.getAllStructFieldRefs();
    List<Object> list = soi.getStructFieldsDataAsList(obj);
    List<? extends StructField> declaredFields =
        (serdeParams.rowTypeInfo != null
            && ((StructTypeInfo) serdeParams.rowTypeInfo)
                .getAllStructFieldNames().size() > 0)
        ? ((StructObjectInspector) getObjectInspector()).getAllStructFieldRefs()
        : null;

    serializeStream.reset();
    serializedSize = 0;

    // Serialize each field
    for (int i = 0; i < fields.size(); i++) {
      // Append the separator if needed.
      if (i > 0) {
        serializeStream.write(serdeParams.separators[0]);
      }
      // Get the field objectInspector and the field object.
      ObjectInspector foi = fields.get(i).getFieldObjectInspector();
      Object f = (list == null ? null : list.get(i));

      if (declaredFields != null && i >= declaredFields.size()) {
        throw new SerDeException("Error: expecting " + declaredFields.size()
            + " but asking for field " + i + "\n" + "data=" + obj + "\n"
            + "tableType=" + serdeParams.rowTypeInfo.toString() + "\n"
            + "dataType="
            + TypeInfoUtils.getTypeInfoFromObjectInspector(objInspector));
      }

Examples of org.apache.hadoop.hive.serde2.SerDeException
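A thin wrapper that rethrows IOException as SerDeException, so callers of the SerDe interface only have to handle one checked exception type.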

      SerDeParameters serdeParams) throws SerDeException {
    try {
      serialize(out, obj, objInspector, serdeParams.separators, 1, serdeParams.nullSequence,
          serdeParams.escaped, serdeParams.escapeChar, serdeParams.needsEscape);
    } catch (IOException e) {
      throw new SerDeException(e);
    }
  }

Examples of org.apache.hadoop.hive.serde2.SerDeException
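A column-by-column deserialization loop over sort-ordered binary data; again, any IOException is wrapped in a SerDeException.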

      for (int i = 0; i < columnNames.size(); i++) {
        row.set(i, deserialize(inputByteBuffer, columnTypes.get(i),
            columnSortOrderIsDesc[i], row.get(i)));
      }
    } catch (IOException e) {
      throw new SerDeException(e);
    }

    return row;
  }

Examples of org.apache.hadoop.hive.serde2.SerDeException
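Regex-based parsing of an S3 access log: a row that does not match the pattern is reported through a SerDeException that carries both the offending row and the underlying cause.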

    try {
      // matches() must run before group(); its boolean result is ignored, so a
      // non-matching row surfaces as an exception from group(), wrapped below.
      match.matches();
      c.bucketowner = match.group(t++);
      c.bucketname = match.group(t++);
    } catch (Exception e) {
      throw new SerDeException("S3 Log Regex did not match:" + row, e);
    }
    c.rdatetime = match.group(t++);

    // Should we convert the datetime to the format Hive understands by default
    // - either yyyy-mm-dd HH:MM:SS or seconds since epoch?

Examples of org.apache.hadoop.hive.serde2.SerDeException
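A deserialize() method that decodes BytesWritable or Text into a string, converting character-coding errors, class-cast errors, and any other failure into a SerDeException.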

    if (field instanceof BytesWritable) {
      BytesWritable b = (BytesWritable) field;
      try {
        row = Text.decode(b.get(), 0, b.getSize());
      } catch (CharacterCodingException e) {
        throw new SerDeException(e);
      }
    } else if (field instanceof Text) {
      row = field.toString();
    }
    try {
      deserialize(deserializeCache, row);
      return deserializeCache;
    } catch (ClassCastException e) {
      throw new SerDeException(this.getClass().getName()
          + " expects Text or BytesWritable", e);
    } catch (Exception e) {
      throw new SerDeException(e);
    }
  }

Examples of org.apache.hadoop.hive.serde2.SerDeException
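A schema check at initialization time: every column must have a primitive category, and the exception message names the first offending column.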

    numColumns = columnNames.size();

    // All columns have to be primitive.
    for (int c = 0; c < numColumns; c++) {
      if (columnTypes.get(c).getCategory() != Category.PRIMITIVE) {
        throw new SerDeException(getClass().getName()
            + " only accepts primitive columns, but column[" + c + "] named "
            + columnNames.get(c) + " has category "
            + columnTypes.get(c).getCategory());
      }
    }

Examples of org.apache.hadoop.hive.serde2.SerDeException
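The tail of a typed-bytes deserializer: the end-of-record marker is consumed, and IOException is once more wrapped in a SerDeException.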

      // The next byte should be the ENDOFRECORD marker. Note that the read
      // happens inside the assert, so it is skipped when assertions are off.
      assert tbIn.readTypeCode() == Type.ENDOFRECORD;

    } catch (IOException e) {
      throw new SerDeException(e);
    }

    return row;
  }
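
Taken together, the examples above reduce to three recurring patterns: validating the ObjectInspector category up front, rejecting unsupported Writable types, and wrapping low-level checked exceptions. The following is a minimal, self-contained sketch of those patterns; the class and helper names (SerDeExceptionPatterns, requireStruct, asText, writeField) are illustrative, not part of Hive.

import java.io.IOException;
import java.io.OutputStream;

import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

// Illustrative helper class, not part of Hive.
public final class SerDeExceptionPatterns {

  // Pattern 1: fail fast when the ObjectInspector has the wrong category.
  static void requireStruct(ObjectInspector oi) throws SerDeException {
    if (oi.getCategory() != Category.STRUCT) {
      throw new SerDeException(SerDeExceptionPatterns.class.toString()
          + " can only serialize struct types, but we got: " + oi.getTypeName());
    }
  }

  // Pattern 2: reject unsupported Writable types on the deserialize path.
  static Text asText(Writable field) throws SerDeException {
    if (field instanceof Text) {
      return (Text) field;
    }
    if (field instanceof BytesWritable) {
      BytesWritable b = (BytesWritable) field;
      Text t = new Text();
      // Copy only the valid prefix; getBytes() may return a padded buffer.
      t.set(b.getBytes(), 0, b.getLength());
      return t;
    }
    throw new SerDeException(SerDeExceptionPatterns.class.toString()
        + ": expects either BytesWritable or Text object!");
  }

  // Pattern 3: wrap low-level checked exceptions so callers see only
  // SerDeException.
  static void writeField(OutputStream out, byte[] data) throws SerDeException {
    try {
      out.write(data);
    } catch (IOException e) {
      throw new SerDeException(e);
    }
  }
}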