Package: org.apache.hadoop.hive.ql.plan

Examples of org.apache.hadoop.hive.ql.plan.GroupByDesc


      String column,
      TypeInfo dataTypeInfo,
      String key,
      TypeInfo keyTypeInfo) {

    GroupByDesc desc = buildGroupByDescType(ctx, aggregate, column, dataTypeInfo);

    ExprNodeDesc keyExp = buildColumnDesc(ctx, key, keyTypeInfo);
    ArrayList<ExprNodeDesc> keys = new ArrayList<ExprNodeDesc>();
    keys.add(keyExp);
    desc.setKeys(keys);

    desc.getOutputColumnNames().add("_col1");

    return desc;
  }
View Full Code Here


    Map<String, Integer> mapColumnNames = new HashMap<String, Integer>();
    mapColumnNames.put("Key", 0);
    mapColumnNames.put("Value", 1);
    VectorizationContext ctx = new VectorizationContext(mapColumnNames, 2);

    GroupByDesc desc = buildKeyGroupByDesc (ctx, "max",
        "Value", TypeInfoFactory.longTypeInfo,
        "Key", TypeInfoFactory.longTypeInfo);

    // Set the memory threshold so that we get 100Kb before we need to flush.
    MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
    long maxMemory = memoryMXBean.getHeapMemoryUsage().getMax();

    float treshold = 100.0f*1024.0f/maxMemory;
    desc.setMemoryThreshold(treshold);

    VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);

    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
    vgo.initialize(null, null);
View Full Code Here

      keysDesc.add(
        buildColumnDesc(ctx, columnName,
            TypeInfoFactory.getPrimitiveTypeInfo(columnTypes[i])));
    }

    GroupByDesc desc = new GroupByDesc();
    desc.setOutputColumnNames(outputColumnNames);
    desc.setAggregators(aggs);
    desc.setKeys(keysDesc);

    VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);

    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
    vgo.initialize(null, null);
View Full Code Here

    ArrayList<String> outputColumnNames = new ArrayList<String>();
    outputColumnNames.add("_col0");
    outputColumnNames.add("_col1");

    GroupByDesc desc = new GroupByDesc();
    desc.setOutputColumnNames(outputColumnNames);
    desc.setAggregators(aggs);

    ExprNodeDesc keyExp = buildColumnDesc(ctx, "Key",
        TypeInfoFactory.getPrimitiveTypeInfo(data.getTypes()[0]));
    ArrayList<ExprNodeDesc> keysDesc = new ArrayList<ExprNodeDesc>();
    keysDesc.add(keyExp);
    desc.setKeys(keysDesc);

    VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);

    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
    vgo.initialize(null, null);
View Full Code Here

      Object expected) throws HiveException {
    Map<String, Integer> mapColumnNames = new HashMap<String, Integer>();
    mapColumnNames.put("A", 0);
    VectorizationContext ctx = new VectorizationContext(mapColumnNames, 1);

    GroupByDesc desc = buildGroupByDescCountStar (ctx);

    VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);

    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
    vgo.initialize(null, null);
View Full Code Here

      Object expected) throws HiveException {
    Map<String, Integer> mapColumnNames = new HashMap<String, Integer>();
    mapColumnNames.put("A", 0);
    VectorizationContext ctx = new VectorizationContext(mapColumnNames, 1);

    GroupByDesc desc = buildGroupByDescType(ctx, aggregateName, "A",
        TypeInfoFactory.stringTypeInfo);

    VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);

    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
View Full Code Here

          Object expected) throws HiveException {
        Map<String, Integer> mapColumnNames = new HashMap<String, Integer>();
        mapColumnNames.put("A", 0);
        VectorizationContext ctx = new VectorizationContext(mapColumnNames, 1);

        GroupByDesc desc = buildGroupByDescType(ctx, aggregateName, "A",
            TypeInfoFactory.getDecimalTypeInfo(30, 4));

        VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);

        FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
View Full Code Here

      Object expected) throws HiveException {
    Map<String, Integer> mapColumnNames = new HashMap<String, Integer>();
    mapColumnNames.put("A", 0);
    VectorizationContext ctx = new VectorizationContext(mapColumnNames, 1);

    GroupByDesc desc = buildGroupByDescType (ctx, aggregateName, "A",
        TypeInfoFactory.doubleTypeInfo);

    VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);

    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
View Full Code Here

      Object expected) throws HiveException {
    Map<String, Integer> mapColumnNames = new HashMap<String, Integer>();
    mapColumnNames.put("A", 0);
    VectorizationContext ctx = new VectorizationContext(mapColumnNames, 1);

    GroupByDesc desc = buildGroupByDescType(ctx, aggregateName, "A", TypeInfoFactory.longTypeInfo);

    VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);

    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
    vgo.initialize(null, null);
View Full Code Here

    mapColumnNames.put("Key", 0);
    mapColumnNames.put("Value", 1);
    VectorizationContext ctx = new VectorizationContext(mapColumnNames, 2);
    Set<Object> keys = new HashSet<Object>();

    GroupByDesc desc = buildKeyGroupByDesc (ctx, aggregateName, "Value",
        TypeInfoFactory.longTypeInfo, "Key", TypeInfoFactory.longTypeInfo);

    VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);

    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
View Full Code Here

TOP

Related Classes of org.apache.hadoop.hive.ql.plan.GroupByDesc

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.