Package org.apache.hadoop.hive.serde2.objectinspector

Examples of org.apache.hadoop.hive.serde2.objectinspector.StructField
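
Before the excerpts, a minimal self-contained sketch of the core StructField API, assuming Hive's serde2 object-inspector classes; the demo class StructFieldDemo and the row type MyRow are made-up names for illustration, not part of Hive. It builds a StructObjectInspector for a plain Java class through ObjectInspectorFactory, lists all fields via getAllStructFieldRefs(), and reads one field via getStructFieldRef() and getStructFieldData().

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.ObjectInspectorOptions;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

public class StructFieldDemo {

  // Hypothetical row class used only for this illustration.
  public static class MyRow {
    public int a;
    public String b;
  }

  public static void main(String[] args) {
    MyRow row = new MyRow();
    row.a = 1;
    row.b = "one";

    // Reflection-based inspector for MyRow; for a class with fields this is a
    // StructObjectInspector.
    StructObjectInspector soi = (StructObjectInspector)
        ObjectInspectorFactory.getReflectionObjectInspector(MyRow.class,
            ObjectInspectorOptions.JAVA);

    // Enumerate every field of the struct.
    for (StructField sf : soi.getAllStructFieldRefs()) {
      System.out.println(sf.getFieldName() + " : "
          + sf.getFieldObjectInspector().getTypeName());
    }

    // Look a single field up by name and extract its value from a row object.
    StructField aField = soi.getStructFieldRef("a");
    Object aValue = soi.getStructFieldData(row, aField);
    System.out.println("a = " + aValue);
  }
}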


          if (!(oi instanceof StructObjectInspector)) {
            oi = s.getObjectInspector();
            break;
          }
          StructObjectInspector soi = (StructObjectInspector)oi;
          StructField sf = soi.getStructFieldRef(names[i]);
          if (sf == null) {
            // If invalid field, then return the schema of the table
            oi = s.getObjectInspector();
            break;
          } else {
            oi = sf.getFieldObjectInspector();
          }
        }

        // rules on how to recurse the SerDe based on its type
        if (oi.getCategory() != Category.STRUCT) {


        if (sz == nextSz) {
          // Output a warning once at least 1000 rows have been reached for a join operand.
          // No warning is output for the last join operand, since its size never
          // reaches joinEmitInterval.
          StructObjectInspector soi = (StructObjectInspector)inputObjInspectors[tag];
          StructField sf = soi.getStructFieldRef(Utilities.ReduceField.KEY.toString());
          Object keyObject = soi.getStructFieldData(row, sf);
          LOG.warn("table " + alias + " has " + sz + " rows for join key " + keyObject);
          nextSz = getNextSize(nextSz);
        }
      }

        cdop.retrieve(io);
        System.out.println("[" + i + "] io.o=" + io.o);
        System.out.println("[" + i + "] io.oi=" + io.oi);
        StructObjectInspector soi = (StructObjectInspector)io.oi;
        assert(soi != null);
        StructField a = soi.getStructFieldRef("a");
        StructField b = soi.getStructFieldRef("b");
        assertEquals(""+(i+1), ((PrimitiveObjectInspector)a.getFieldObjectInspector())
            .getPrimitiveJavaObject(soi.getStructFieldData(io.o, a)));
        assertEquals((i) + "1", ((PrimitiveObjectInspector)b.getFieldObjectInspector())
            .getPrimitiveJavaObject(soi.getStructFieldData(io.o, b)));
      }

      System.out.println("Script Operator ok");

    String last_name = names[names.length-1];
    for(int i = 1; i < names.length; i++) {

      if (oi instanceof StructObjectInspector) {
        StructObjectInspector soi = (StructObjectInspector)oi;
        StructField sf = soi.getStructFieldRef(names[i]);
        if (sf == null) {
          throw new MetaException("Invalid Field " + names[i]);
        } else {
          oi = sf.getFieldObjectInspector();
        }
      }
      else if (oi instanceof ListObjectInspector && names[i].equalsIgnoreCase("$elem$")) {
        ListObjectInspector loi = (ListObjectInspector)oi;
        oi = loi.getListElementObjectInspector();

    String last_name = names[names.length - 1];
    for (int i = 1; i < names.length; i++) {

      if (oi instanceof StructObjectInspector) {
        StructObjectInspector soi = (StructObjectInspector) oi;
        StructField sf = soi.getStructFieldRef(names[i]);
        if (sf == null) {
          throw new MetaException("Invalid Field " + names[i]);
        } else {
          oi = sf.getFieldObjectInspector();
        }
      } else if (oi instanceof ListObjectInspector
          && names[i].equalsIgnoreCase("$elem$")) {
        ListObjectInspector loi = (ListObjectInspector) oi;
        oi = loi.getListElementObjectInspector();
      } else if (oi instanceof MapObjectInspector
          && names[i].equalsIgnoreCase("$key$")) {
        MapObjectInspector moi = (MapObjectInspector) oi;
        oi = moi.getMapKeyObjectInspector();
      } else if (oi instanceof MapObjectInspector
          && names[i].equalsIgnoreCase("$value$")) {
        MapObjectInspector moi = (MapObjectInspector) oi;
        oi = moi.getMapValueObjectInspector();
      } else {
        throw new MetaException("Unknown type for " + names[i]);
      }
    }

    ArrayList<FieldSchema> str_fields = new ArrayList<FieldSchema>();
    // rules on how to recurse the ObjectInspector based on its type
    if (oi.getCategory() != Category.STRUCT) {
      str_fields.add(new FieldSchema(last_name, oi.getTypeName(),
          FROM_SERIALIZER));
    } else {
      List<? extends StructField> fields = ((StructObjectInspector) oi)
          .getAllStructFieldRefs();
      for (int i = 0; i < fields.size(); i++) {
        StructField structField = fields.get(i);
        String fieldName = structField.getFieldName();
        String fieldTypeName = structField.getFieldObjectInspector().getTypeName();
        String fieldComment = determineFieldComment(structField.getFieldComment());

        str_fields.add(new FieldSchema(fieldName, fieldTypeName, fieldComment));
      }
    }
    return str_fields;

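      // Deserialize the raw row bytes with the SerDe, then use each StructField
      // reference from the row's StructObjectInspector to pull the (lazy) field
      // value out and convert it to a plain Java object.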
      List<? extends StructField> fieldRefs = soi.getAllStructFieldRefs();
      Object data = serde.deserialize(new BytesWritable(rowStr.getBytes()));

      assert row.size() == fieldRefs.size() : row.size() + ", " + fieldRefs.size();
      for (int i = 0; i < fieldRefs.size(); i++) {
        StructField fieldRef = fieldRefs.get(i);
        ObjectInspector oi = fieldRef.getFieldObjectInspector();
        Object obj = soi.getStructFieldData(data, fieldRef);
        row.set(i, convertLazyToJava(obj, oi));
      }

      if (LOG.isDebugEnabled()) {

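    // For every join alias, locate the KEY struct inside that alias's input
    // ObjectInspector and collect one ObjectInspector per key field.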
    for (int i = 0; i < numAliases; i++) {
      Byte alias = conf.getTagOrder()[i];
      List<ObjectInspector> skewTableKeyInspectors = new ArrayList<ObjectInspector>();
      StructObjectInspector soi = (StructObjectInspector) joinOp.inputObjInspectors[alias];
      StructField sf = soi.getStructFieldRef(Utilities.ReduceField.KEY
          .toString());
      List<? extends StructField> keyFields = ((StructObjectInspector) sf
          .getFieldObjectInspector()).getAllStructFieldRefs();
      int keyFieldSize = keyFields.size();
      for (int k = 0; k < keyFieldSize; k++) {
        skewTableKeyInspectors.add(keyFields.get(k).getFieldObjectInspector());
      }

        cdop.retrieve(io);
        System.out.println("[" + i + "] io.o=" + io.o);
        System.out.println("[" + i + "] io.oi=" + io.oi);
        StructObjectInspector soi = (StructObjectInspector)io.oi;
        assert(soi != null);
        StructField a = soi.getStructFieldRef("a");
        StructField b = soi.getStructFieldRef("b");
        assertEquals(""+(i+1), soi.getStructFieldData(io.o, a));
        assertEquals((i) + "1", soi.getStructFieldData(io.o, b));
      }

      System.out.println("Script Operator ok");

      }

      // number of rows for the key in the given table
      int sz = storage.get(alias).size();
      StructObjectInspector soi = (StructObjectInspector) inputObjInspectors[tag];
      StructField sf = soi.getStructFieldRef(Utilities.ReduceField.KEY
          .toString());
      List keyObject = (List) soi.getStructFieldData(row, sf);
      // Are we consuming too much memory
      if (alias == numAliases - 1 && !(handleSkewJoin && skewJoinKeyContext.currBigKeyTag >= 0)) {
        if (sz == joinEmitInterval) {
          // The input is sorted by alias, so if we are already in the last join
          // operand, we can emit some results now.
          // Note this has to be done before adding the current row to the storage,
          // to preserve the correctness for outer joins.
          checkAndGenObject();
          storage.get(alias).clear();
        }
      } else {
        if (sz == nextSz) {
          // Print a message once at least 1000 rows have been reached for a join operand.
          // No message is printed for the last join operand, since its size never
          // reaches joinEmitInterval.
          LOG.info("table " + alias + " has " + sz + " rows for join key "
              + keyObject);
          nextSz = getNextSize(nextSz);
        }
      }

      // Add the value to the vector.
      // If the join key is null, process each row in a different group.
      StandardStructObjectInspector inspector =
          (StandardStructObjectInspector) sf.getFieldObjectInspector();
      if (SerDeUtils.hasAnyNullObject(keyObject, inspector, nullsafes)) {
        endGroup();
        startGroup();
      }
      storage.get(alias).add(nr);

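            // Resolve each non-partition column's StructField and field
            // ObjectInspector once, and record whether the column is a
            // partition key.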
            names[i] = column.getName();
            types[i] = column.getType();
            hiveTypes[i] = column.getHiveType();

            if (!column.isPartitionKey()) {
                StructField field = rowInspector.getStructFieldRef(column.getName());
                structFields[i] = field;
                fieldInspectors[i] = field.getFieldObjectInspector();
            }

            isPartitionColumn[i] = column.isPartitionKey();
        }
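
To round out the excerpts above, here is a second self-contained sketch, again with made-up names (StandardStructDemo) rather than Hive code: it builds a StandardStructObjectInspector from explicit field names and primitive inspectors, then reads fields of a List-backed row through their StructField references, the same pattern the join and column-binding examples rely on.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class StandardStructDemo {

  public static void main(String[] args) {
    // Field names and matching inspectors for a struct<key:string, count:int>.
    List<String> fieldNames = Arrays.asList("key", "count");
    List<ObjectInspector> fieldInspectors = new ArrayList<ObjectInspector>();
    fieldInspectors.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
    fieldInspectors.add(PrimitiveObjectInspectorFactory.javaIntObjectInspector);

    StructObjectInspector soi = ObjectInspectorFactory
        .getStandardStructObjectInspector(fieldNames, fieldInspectors);

    // The standard struct representation is simply a list of field values.
    List<Object> row = Arrays.<Object>asList("a", 3);

    // Resolve StructField references once, then reuse them for per-row access.
    StructField keyField = soi.getStructFieldRef("key");
    StructField countField = soi.getStructFieldRef("count");
    System.out.println(soi.getStructFieldData(row, keyField));   // prints: a
    System.out.println(soi.getStructFieldData(row, countField)); // prints: 3
  }
}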
