package org.apache.hadoop.hive.serde2.objectinspector;

Examples of org.apache.hadoop.hive.serde2.objectinspector.StructField


    // Now go the correct way, through objectinspectors
    // NOTE(review): fragment of a serde test — `aoig`, `row`, `outBytes` and
    // `bytes` are declared earlier in the enclosing method (not visible here).
    StandardStructObjectInspector oi = (StandardStructObjectInspector)aoig.getObjectInspector();
    // The row is expected to contain exactly one struct field.
    List<Object> fieldsDataAsList = oi.getStructFieldsDataAsList(row);
    assertEquals(1, fieldsDataAsList.size());
    // Look up the field by name, then extract its data via the inspector.
    StructField fieldRef = oi.getStructFieldRef("hash");

    outBytes = (byte[]) oi.getStructFieldData(row, fieldRef);
    // Verify the bytes round-tripped unchanged, element by element.
    for(int i = 0; i < outBytes.length; i++) {
      assertEquals(bytes[i], outBytes[i]);
    }
View Full Code Here


    // Now go the correct way, through objectinspectors
    // NOTE(review): fragment of a serde test — `aoig`, `row`, `outBytes` and
    // `bytes` are declared earlier in the enclosing method (not visible here).
    StandardStructObjectInspector oi = (StandardStructObjectInspector)aoig.getObjectInspector();
    // The row is expected to contain exactly one struct field.
    List<Object> fieldsDataAsList = oi.getStructFieldsDataAsList(row);
    assertEquals(1, fieldsDataAsList.size());
    // Look up the field by name, then extract its data via the inspector.
    StructField fieldRef = oi.getStructFieldRef("bytesField");

    outBytes = (byte[]) oi.getStructFieldData(row, fieldRef);
    // Verify the bytes round-tripped unchanged, element by element.
    for(int i = 0; i < outBytes.length; i++) {
      assertEquals(bytes[i], outBytes[i]);
    }
View Full Code Here

    StandardStructObjectInspector oi =
            (StandardStructObjectInspector)aoig.getObjectInspector();

    List<Object> z = oi.getStructFieldsDataAsList(row);
    assertEquals(1, z.size());
    StructField fieldRef = oi.getStructFieldRef("amap");

    Map theMap2 = (Map)oi.getStructFieldData(row, fieldRef);
    assertEquals(1l, theMap2.get("one"));
    assertEquals(2l, theMap2.get("two"));
    assertEquals(3l, theMap2.get("three"));
View Full Code Here

public class TestSerdeWithFieldComments extends TestCase {

  private StructField mockedStructField(String name, String oiTypeName,
                                        String comment) {
    StructField m = mock(StructField.class);
    when(m.getFieldName()).thenReturn(name);

    ObjectInspector oi = mock(ObjectInspector.class);
    when(oi.getTypeName()).thenReturn(oiTypeName);

    when(m.getFieldObjectInspector()).thenReturn(oi);
    when(m.getFieldComment()).thenReturn(comment);

    return m;
  }
View Full Code Here

    // reduce KEY has union field as the last field if there are distinct
    // aggregates in group-by.
    // NOTE(review): fragment — `rowInspector`, `unionExprEval` and the
    // enclosing method signature are outside this view.
    List<? extends StructField> sfs =
      ((StructObjectInspector) rowInspector).getAllStructFieldRefs();
    if (sfs.size() > 0) {
      // Check whether the first top-level field is the reduce-side KEY column.
      StructField keyField = sfs.get(0);
      if (keyField.getFieldName().toUpperCase().equals(
          Utilities.ReduceField.KEY.name())) {
        ObjectInspector keyObjInspector = keyField.getFieldObjectInspector();
        if (keyObjInspector instanceof StructObjectInspector) {
          List<? extends StructField> keysfs =
            ((StructObjectInspector) keyObjInspector).getAllStructFieldRefs();
          if (keysfs.size() > 0) {
            // the last field is the union field, if any
            StructField sf = keysfs.get(keysfs.size() - 1);
            if (sf.getFieldObjectInspector().getCategory().equals(
                ObjectInspector.Category.UNION)) {
              // Build an evaluator for the "KEY.<unionField>" column so the
              // union tag can be read per-row later.
              unionExprEval = ExprNodeEvaluatorFactory.get(
                new ExprNodeColumnDesc(TypeInfoUtils.getTypeInfoFromObjectInspector(
                sf.getFieldObjectInspector()),
                keyField.getFieldName() + "." + sf.getFieldName(), null,
                false));
              unionExprEval.initialize(rowInspector);
            }
          }
        }
View Full Code Here

      return EMPTY_BYTE_ARRAY; // shortcut for null keys
    }
    // NOTE(review): fragment — `size`, `fields`, `checkTypes`,
    // `SUPPORTED_PRIMITIVES` and the method signature are outside this view.
    // Collect per-field data and inspectors for the key's struct fields.
    Object[] fieldData = new Object[size];
    List<ObjectInspector> fieldOis = new ArrayList<ObjectInspector>(size);
    for (int i = 0; i < size; ++i) {
      StructField field = fields.get(i);
      ObjectInspector foi = field.getFieldObjectInspector();
      if (checkTypes) {
        // Bail out (null) on anything but a supported primitive category;
        // the caller treats null as "fast path not applicable".
        if (foi.getCategory() != Category.PRIMITIVE) return null; // not supported
        PrimitiveCategory pc = ((PrimitiveObjectInspector)foi).getPrimitiveCategory();
        if (!SUPPORTED_PRIMITIVES.contains(pc)) return null; // not supported
      }
View Full Code Here

      }

      // number of rows for the key in the given table
      long sz = storage[alias].rowCount();
      // NOTE(review): fragment of a join operator's per-row processing —
      // `storage`, `alias`, `tag`, `row`, `nr`, `nullsafes` etc. are outside
      // this view.
      StructObjectInspector soi = (StructObjectInspector) inputObjInspectors[tag];
      StructField sf = soi.getStructFieldRef(Utilities.ReduceField.KEY
          .toString());
      // Raw List: element type depends on the key schema — kept as-is here.
      List keyObject = (List) soi.getStructFieldData(row, sf);
      // Are we consuming too much memory
      if (alias == numAliases - 1 && !(handleSkewJoin && skewJoinKeyContext.currBigKeyTag >= 0) &&
          !hasLeftSemiJoin) {
        if (sz == joinEmitInterval && !hasFilter(alias)) {
          // The input is sorted by alias, so if we are already in the last join
          // operand,
          // we can emit some results now.
          // Note this has to be done before adding the current row to the
          // storage,
          // to preserve the correctness for outer joins.
          checkAndGenObject();
          storage[alias].clearRows();
        }
      } else {
        if (sz == nextSz) {
          // Print a message if we reached at least 1000 rows for a join operand
          // We won't print a message for the last join operand since the size
          // will never reach joinEmitInterval.
          LOG.info("table " + alias + " has " + sz + " rows for join key "
              + keyObject);
          nextSz = getNextSize(nextSz);
        }
      }

      // Add the value to the vector
      // if join-key is null, process each row in different group.
      StructObjectInspector inspector =
          (StructObjectInspector) sf.getFieldObjectInspector();
      if (SerDeUtils.hasAnyNullObject(keyObject, inspector, nullsafes)) {
        endGroup();
        startGroup();
      }
      storage[alias].addRow(nr);
View Full Code Here

    // NOTE(review): fragment — the method signature, `outputColumnNames`,
    // `columnMap`, `outputBatch` and `buildObjectAssign` are outside this view.
    // Build one VectorColumnAssign per output column, resolving each column's
    // ObjectInspector through the output struct inspector.
    StructObjectInspector soi = (StructObjectInspector) outputOI;
    VectorColumnAssign[] vcas = new VectorColumnAssign[outputColumnNames.size()];
    for (int i=0; i<outputColumnNames.size(); ++i) {
      String columnName = outputColumnNames.get(i);
      Integer columnIndex = columnMap.get(columnName);
      StructField columnRef = soi.getStructFieldRef(columnName);
      ObjectInspector valueOI = columnRef.getFieldObjectInspector();
      vcas[i] = buildObjectAssign(outputBatch, columnIndex, valueOI);
    }
    return vcas;
  }
View Full Code Here

      // NOTE(review): fragment — `rowOI`, `row`, `protocol` and the enclosing
      // method are outside this view.
      // Convert each struct field of the row to its Thrift payload form,
      // preserving nulls.
      StructObjectInspector structOI = (StructObjectInspector) rowOI;
      List<? extends StructField> fields = structOI.getAllStructFieldRefs();

      Object[] converted = new Object[fields.size()];
      for (int i = 0 ; i < converted.length; i++) {
        StructField fieldRef = fields.get(i);
        Object field = structOI.getStructFieldData(row, fieldRef);
        converted[i] = field == null ? null :
            SerDeUtils.toThriftPayload(field, fieldRef.getFieldObjectInspector(), protocol);
      }
      return converted;
    }
View Full Code Here

    @Override
    public String getTypeName() {
      StringBuilder buffer = new StringBuilder();
      buffer.append("struct<");
      for(int i=0; i < fields.size(); ++i) {
        StructField field = fields.get(i);
        if (i != 0) {
          buffer.append(",");
        }
        buffer.append(field.getFieldName());
        buffer.append(":");
        buffer.append(field.getFieldObjectInspector().getTypeName());
      }
      buffer.append(">");
      return buffer.toString();
    }
View Full Code Here

TOP

Related Classes of org.apache.hadoop.hive.serde2.objectinspector.StructField

Copyright © 2018 www.massapicom. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware@gmail.com.