Package org.apache.hadoop.hive.serde2.objectinspector

Examples of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector

The snippets below are truncated excerpts from the Apache Hive codebase showing ObjectInspector in use: resolving windowing UDAF evaluators, converting between partition and table inspectors, initializing group-by and reduce-sink operators, and building inspectors by reflection in the ORC tests.
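As a warm-up, here is a minimal, self-contained sketch of the core idea, assuming only the Hive serde2 classes on the classpath: an ObjectInspector describes how to read a value without dictating how it is stored, so callers go through the inspector instead of casting the row object.

    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;

    public class ObjectInspectorBasics {
      public static void main(String[] args) {
        // A primitive inspector that knows how to read plain Java Strings.
        StringObjectInspector soi =
            PrimitiveObjectInspectorFactory.javaStringObjectInspector;

        Object row = "hello";  // rows are passed around as Object
        // The caller never casts the row; the inspector does the reading.
        System.out.println(soi.getPrimitiveJavaObject(row)); // hello
        System.out.println(soi.getCategory());               // PRIMITIVE
        System.out.println(soi.getTypeName());               // string
      }
    }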


    // Truncated excerpt: resolving a windowing UDAF evaluator. Anything
    // other than lead/lag goes through the normal resolution path.
    if (!name.toLowerCase().equals(LEAD_FUNC_NAME)
        && !name.toLowerCase().equals(LAG_FUNC_NAME)) {
      return getGenericUDAFEvaluator(name, argumentOIs, isDistinct, isAllColumns);
    }

    // This must be the lead/lag UDAF: hand the argument ObjectInspectors to
    // the resolver wrapped in a GenericUDAFParameterInfo.
    ObjectInspector[] args = new ObjectInspector[argumentOIs.size()];
    GenericUDAFResolver udafResolver = finfo.getfInfo().getGenericUDAFResolver();
    GenericUDAFParameterInfo paramInfo = new SimpleGenericUDAFParameterInfo(
        argumentOIs.toArray(args), isDistinct, isAllColumns);
    return ((GenericUDAFResolver2) udafResolver).getEvaluator(paramInfo);
  }


      // Truncated excerpt: pick the deserializer whose ObjectInspector feeds
      // the partition-to-table converter built below.
      else {
        tblSerde = currPart.getTableDesc().getDeserializerClass().newInstance();
        tblSerde.initialize(job, currPart.getTableDesc().getProperties());
      }

      ObjectInspector outputOI = ObjectInspectorConverters.getConvertedOI(
          serde.getObjectInspector(),
          partitionedTableOI == null ? tblSerde.getObjectInspector() : partitionedTableOI, true);

      partTblObjectInspectorConverter = ObjectInspectorConverters.getConverter(
          serde.getObjectInspector(), outputOI);
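The getConvertedOI/getConverter pair above is the general mechanism for adapting values from one inspector's representation to another's. Below is a minimal standalone sketch of the same API; the String-to-IntWritable conversion is illustrative, not taken from the excerpt.

    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

    public class ConverterDemo {
      public static void main(String[] args) {
        // Source values are plain Java Strings; targets are IntWritable.
        ObjectInspector inputOI = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
        ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;

        // getConverter returns an identity converter if the inspectors already match.
        Converter conv = ObjectInspectorConverters.getConverter(inputOI, outputOI);
        System.out.println(conv.convert("42")); // an IntWritable printing 42
      }
    }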

      // Truncated excerpt: compute an ObjectInspector that is valid across all
      // of the table's partitions, starting from the table-level deserializer.
      PartitionDesc partition = listParts.get(0);
      Deserializer tblSerde = partition.getTableDesc().getDeserializerClass().newInstance();
      tblSerde.initialize(job, partition.getTableDesc().getProperties());

      partitionedTableOI = null;
      ObjectInspector tableOI = tblSerde.getObjectInspector();

      // Get the OI corresponding to all the partitions
      for (PartitionDesc listPart : listParts) {
        partition = listPart;
        Deserializer partSerde = listPart.getDeserializerClass().newInstance();
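Each partition's Deserializer is where its ObjectInspector comes from. Below is a compact, self-contained version of that initialize-then-inspect pattern; LazySimpleSerDe and the two-column schema are assumptions for illustration, not taken from the excerpt.

    import java.util.Properties;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.serde.serdeConstants;
    import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

    public class SerDeInspectorDemo {
      public static void main(String[] args) throws Exception {
        // Hypothetical two-column schema: a int, b string.
        Properties props = new Properties();
        props.setProperty(serdeConstants.LIST_COLUMNS, "a,b");
        props.setProperty(serdeConstants.LIST_COLUMN_TYPES, "int,string");

        LazySimpleSerDe serde = new LazySimpleSerDe();
        serde.initialize(new Configuration(), props);

        // The SerDe hands back the ObjectInspector describing its rows.
        ObjectInspector oi = serde.getObjectInspector();
        System.out.println(oi.getTypeName()); // struct<a:int,b:string>
      }
    }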

    // Truncated excerpt: initialization of a group-by operator. Every
    // expression evaluator is initialized against the input row's
    // ObjectInspector, and key columns additionally get standard
    // WRITABLE-copy inspectors so cached keys do not depend on the
    // input row's in-memory format.
    heartbeatInterval = HiveConf.getIntVar(hconf,
        HiveConf.ConfVars.HIVESENDHEARTBEAT);
    countAfterReport = 0;
    groupingSetsPresent = conf.isGroupingSetsPresent();
    ObjectInspector rowInspector = inputObjInspectors[0];

    // init keyFields
    int numKeys = conf.getKeys().size();

    keyFields = new ExprNodeEvaluator[numKeys];
    keyObjectInspectors = new ObjectInspector[numKeys];
    currentKeyObjectInspectors = new ObjectInspector[numKeys];
    for (int i = 0; i < numKeys; i++) {
      keyFields[i] = ExprNodeEvaluatorFactory.get(conf.getKeys().get(i));
      keyObjectInspectors[i] = keyFields[i].initialize(rowInspector);
      currentKeyObjectInspectors[i] = ObjectInspectorUtils
        .getStandardObjectInspector(keyObjectInspectors[i],
        ObjectInspectorCopyOption.WRITABLE);
    }

    // Initialize the constants for the grouping sets, so that they can be re-used for
    // each row
    if (groupingSetsPresent) {
      groupingSets = conf.getListGroupingSets();
      groupingSetsPosition = conf.getGroupingSetPosition();
      newKeysGroupingSets = new ArrayList<Object>();
      groupingSetsBitSet = new ArrayList<FastBitSet>();

      for (Integer groupingSet: groupingSets) {
        // Create the mapping corresponding to the grouping set
        ExprNodeEvaluator groupingSetValueEvaluator =
          ExprNodeEvaluatorFactory.get(new ExprNodeConstantDesc(String.valueOf(groupingSet)));

        newKeysGroupingSets.add(groupingSetValueEvaluator.evaluate(null));
        groupingSetsBitSet.add(groupingSet2BitSet(groupingSet));
      }
    }

    // Initialize unionExprEval for the reduce side: the reduce KEY carries a
    // union as its last field when the group-by contains distinct aggregates.
    List<? extends StructField> sfs =
      ((StructObjectInspector) rowInspector).getAllStructFieldRefs();
    if (sfs.size() > 0) {
      StructField keyField = sfs.get(0);
      if (keyField.getFieldName().toUpperCase().equals(
          Utilities.ReduceField.KEY.name())) {
        ObjectInspector keyObjInspector = keyField.getFieldObjectInspector();
        if (keyObjInspector instanceof StructObjectInspector) {
          List<? extends StructField> keysfs =
            ((StructObjectInspector) keyObjInspector).getAllStructFieldRefs();
          if (keysfs.size() > 0) {
            // the last field is the union field, if any
            StructField sf = keysfs.get(keysfs.size() - 1);
            if (sf.getFieldObjectInspector().getCategory().equals(
                ObjectInspector.Category.UNION)) {
              unionExprEval = ExprNodeEvaluatorFactory.get(
                new ExprNodeColumnDesc(TypeInfoUtils.getTypeInfoFromObjectInspector(
                sf.getFieldObjectInspector()),
                keyField.getFieldName() + "." + sf.getFieldName(), null,
                false));
              unionExprEval.initialize(rowInspector);
            }
          }
        }
      }
    }
    // init aggregationParameterFields
    ArrayList<AggregationDesc> aggrs = conf.getAggregators();
    aggregationParameterFields = new ExprNodeEvaluator[aggrs.size()][];
    aggregationParameterObjectInspectors = new ObjectInspector[aggrs.size()][];
    aggregationParameterStandardObjectInspectors = new ObjectInspector[aggrs.size()][];
    aggregationParameterObjects = new Object[aggrs.size()][];
    aggregationIsDistinct = new boolean[aggrs.size()];
    for (int i = 0; i < aggrs.size(); i++) {
      AggregationDesc aggr = aggrs.get(i);
      ArrayList<ExprNodeDesc> parameters = aggr.getParameters();
      aggregationParameterFields[i] = new ExprNodeEvaluator[parameters.size()];
      aggregationParameterObjectInspectors[i] = new ObjectInspector[parameters
          .size()];
      aggregationParameterStandardObjectInspectors[i] = new ObjectInspector[parameters
          .size()];
      aggregationParameterObjects[i] = new Object[parameters.size()];
      for (int j = 0; j < parameters.size(); j++) {
        aggregationParameterFields[i][j] = ExprNodeEvaluatorFactory
            .get(parameters.get(j));
        aggregationParameterObjectInspectors[i][j] = aggregationParameterFields[i][j]
            .initialize(rowInspector);
        if (unionExprEval != null) {
          String[] names = parameters.get(j).getExprString().split("\\.");
          // parameters of the form : KEY.colx:t.coly
          if (Utilities.ReduceField.KEY.name().equals(names[0])) {
            String name = names[names.length - 2];
            int tag = Integer.parseInt(name.split("\\:")[1]);
            if (aggr.getDistinct()) {
              // is distinct
              Set<Integer> set = distinctKeyAggrs.get(tag);
              if (null == set) {
                set = new HashSet<Integer>();
                distinctKeyAggrs.put(tag, set);
              }
              if (!set.contains(i)) {
                set.add(i);
              }
            } else {
              Set<Integer> set = nonDistinctKeyAggrs.get(tag);
              if (null == set) {
                set = new HashSet<Integer>();
                nonDistinctKeyAggrs.put(tag, set);
              }
              if (!set.contains(i)) {
                set.add(i);
              }
            }
          } else {
            // will be VALUE._COLx
            if (!nonDistinctAggrs.contains(i)) {
              nonDistinctAggrs.add(i);
            }
          }
        } else {
          if (aggr.getDistinct()) {
            aggregationIsDistinct[i] = true;
          }
        }
        aggregationParameterStandardObjectInspectors[i][j] = ObjectInspectorUtils
            .getStandardObjectInspector(
            aggregationParameterObjectInspectors[i][j],
            ObjectInspectorCopyOption.WRITABLE);
        aggregationParameterObjects[i][j] = null;
      }
      if (parameters.size() == 0) {
        // for ex: count(*)
        if (!nonDistinctAggrs.contains(i)) {
          nonDistinctAggrs.add(i);
        }
      }
    }

    // init aggregationClasses
    aggregationEvaluators = new GenericUDAFEvaluator[conf.getAggregators()
        .size()];
    for (int i = 0; i < aggregationEvaluators.length; i++) {
      AggregationDesc agg = conf.getAggregators().get(i);
      aggregationEvaluators[i] = agg.getGenericUDAFEvaluator();
    }

    // init objectInspectors
    int totalFields = keyFields.length + aggregationEvaluators.length;
    objectInspectors = new ArrayList<ObjectInspector>(totalFields);
    for (int i = 0; i < keyFields.length; i++) {
      objectInspectors.add(null); // placeholder slots for the key inspectors
    }
    MapredContext context = MapredContext.get();
    if (context != null) {
      for (GenericUDAFEvaluator genericUDAFEvaluator : aggregationEvaluators) {
        context.setup(genericUDAFEvaluator);
      }
    }
    for (int i = 0; i < aggregationEvaluators.length; i++) {
      ObjectInspector roi = aggregationEvaluators[i].init(conf.getAggregators()
          .get(i).getMode(), aggregationParameterObjectInspectors[i]);
      objectInspectors.add(roi);
    }

    aggregationsParametersLastInvoke = new Object[conf.getAggregators().size()][];
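Two idioms from this excerpt are worth isolating: ObjectInspectorUtils.getStandardObjectInspector with ObjectInspectorCopyOption.WRITABLE describes the standard, writable-backed form of a value, and copyToStandardObject performs the matching copy. A minimal sketch with an illustrative String value:

    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

    public class StandardCopyDemo {
      public static void main(String[] args) {
        ObjectInspector javaOI = PrimitiveObjectInspectorFactory.javaStringObjectInspector;

        // The standard inspector for the WRITABLE option reads Text-backed values.
        ObjectInspector stdOI = ObjectInspectorUtils.getStandardObjectInspector(
            javaOI, ObjectInspectorCopyOption.WRITABLE);
        System.out.println(stdOI.getTypeName()); // string

        // Copy a Java String into its standard writable representation.
        Object copy = ObjectInspectorUtils.copyToStandardObject(
            "abc", javaOI, ObjectInspectorCopyOption.WRITABLE);
        System.out.println(copy.getClass().getSimpleName()); // Text
      }
    }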

  }

  @Override
  public void processOp(Object row, int tag) throws HiveException {
    firstRow = false;
    ObjectInspector rowInspector = inputObjInspectors[tag];
    // Total number of input rows is needed for hash aggregation only
    if (hashAggr && !groupKeyIsNotReduceKey) {
      numRowsInput++;
      // if hash aggregation is not behaving properly, disable it
      if (numRowsInput == numRowsCompareHashAggr) {

      }
      // Truncated excerpt: a reduce-sink-style operator describing its output
      // rows as a (key, value) struct built from the key and value evaluators.
      outputValue = new Object[valueEval.length];

      this.tag = tag;

      ObjectInspector keyObjectInspector = initEvaluatorsAndReturnStruct(keyEval,
          outputKeyColumnNames, inputObjInspector);
      ObjectInspector valueObjectInspector = initEvaluatorsAndReturnStruct(valueEval,
          outputValueColumnNames, inputObjInspector);
      List<ObjectInspector> ois = new ArrayList<ObjectInspector>();
      ois.add(keyObjectInspector);
      ois.add(valueObjectInspector);
      this.outputObjInspector = ObjectInspectorFactory.getStandardStructObjectInspector(
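getStandardStructObjectInspector, used above to describe the operator's (key, value) output row, only needs parallel lists of field names and field inspectors. A standalone sketch with hypothetical field names:

    import java.util.Arrays;
    import java.util.List;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
    import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

    public class StructOIDemo {
      public static void main(String[] args) {
        List<String> fieldNames = Arrays.asList("key", "value");
        List<ObjectInspector> fieldOIs = Arrays.<ObjectInspector>asList(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector,
            PrimitiveObjectInspectorFactory.javaLongObjectInspector);

        StructObjectInspector structOI =
            ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
        System.out.println(structOI.getTypeName()); // struct<key:string,value:bigint>
      }
    }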

  @Override
  public void configure(JobConf job) {
    // Truncated excerpt: a reduce task reserving one ObjectInspector slot per
    // possible input tag (at most Byte.MAX_VALUE of them).
    rowObjectInspector = new ObjectInspector[Byte.MAX_VALUE];
    ObjectInspector[] valueObjectInspector = new ObjectInspector[Byte.MAX_VALUE];
    ObjectInspector keyObjectInspector;

    // Allocate the memory MXBean up front so heap usage can be logged.
    memoryMXBean = ManagementFactory.getMemoryMXBean();
    l4j.info("maximum memory = " + memoryMXBean.getHeapMemoryUsage().getMax());

      if (jobConf == null) {
        jobConf = job;
      }
      // initialize the forward operator
      ObjectInspector objectInspector = fetchOp.getOutputObjectInspector();
      forwardOp.initialize(jobConf, new ObjectInspector[] {objectInspector});
      l4j.info("fetchoperator for " + entry.getKey() + " initialized");
    }
  }

    fs.delete(testFilePath, false);
  }

  @Test
  public void testWriteFormat_0_11() throws Exception {
    ObjectInspector inspector;
    synchronized (TestOrcFile.class) {
      // getReflectionObjectInspector caches inspectors, so the tests
      // serialize access to the factory.
      inspector = ObjectInspectorFactory
          .getReflectionObjectInspector(AllTypesRow.class,
              ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
    }
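getReflectionObjectInspector, as the ORC tests use it, derives a StructObjectInspector from a plain Java class. A standalone sketch with a hypothetical two-field row class:

    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
    import org.apache.hadoop.hive.serde2.objectinspector.StructField;
    import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

    public class ReflectionOIDemo {
      // Hypothetical row class standing in for AllTypesRow.
      static class Point {
        int x = 1;
        int y = 2;
      }

      public static void main(String[] args) {
        StructObjectInspector oi = (StructObjectInspector) ObjectInspectorFactory
            .getReflectionObjectInspector(Point.class,
                ObjectInspectorFactory.ObjectInspectorOptions.JAVA);

        // Field access goes through the inspector, never the class itself.
        Point row = new Point();
        for (StructField f : oi.getAllStructFieldRefs()) {
          System.out.println(f.getFieldName() + " = " + oi.getStructFieldData(row, f));
        }
      }
    }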

    rows.close();
  }

  @Test
  public void test1() throws Exception {
    ObjectInspector inspector;
    synchronized (TestOrcFile.class) {
      inspector = ObjectInspectorFactory.getReflectionObjectInspector
          (BigRow.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
    }
    Writer writer = OrcFile.createWriter(testFilePath,
