Package org.apache.hadoop.hive.ql.plan

Examples of org.apache.hadoop.hive.ql.plan.fileSinkDesc
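
The excerpts below come from Hive's query-plan tests and plan-generation code. A fileSinkDesc is the plan descriptor behind a FileSinkOperator, the operator that writes a plan's output rows out to a directory. Every excerpt builds it with the same three arguments: an output directory name, a tableDesc describing the on-disk serialization, and a boolean compression flag. As a minimal sketch, assuming only the constructor shape visible in these excerpts (tmpdir stands in for any scratch directory):

    // a sink writing uncompressed rows to tmpdir + "example.out" using
    // Hive's default text-format table descriptor
    Operator<fileSinkDesc> sink = OperatorFactory.get(
        new fileSinkDesc(tmpdir + "example.out", Utilities.defaultTd, false));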


    // map-side work: register the scan of src under alias "a" and record the
    // key/value serialization info from the upstream reduce sink (op1)
    Utilities.addMapWork(mr, src, "a", op1);
    mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
    mr.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo());

    // reduce side work: extract the value field and write it to a file sink
    Operator<fileSinkDesc> op3 = OperatorFactory.get(
        new fileSinkDesc(tmpdir + "mapredplan1.out", Utilities.defaultTd, false));

    Operator<extractDesc> op2 = OperatorFactory.get(
        new extractDesc(getStringColumn(Utilities.ReduceField.VALUE.toString())), op3);
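This and the next five excerpts follow one test template: set up the map side of a mapredWork (mr), then attach a reduce-side chain that terminates in a file sink. Only the operator in front of the sink varies between plans (extract, filter, or select), along with the output file name.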


    // same map-side wiring as the previous plan
    Utilities.addMapWork(mr, src, "a", op1);
    mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
    mr.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo());

    // reduce side work: filter rows first, then write the survivors to a file sink
    Operator<fileSinkDesc> op4 = OperatorFactory.get(
        new fileSinkDesc(tmpdir + "mapredplan2.out", Utilities.defaultTd, false));

    Operator<filterDesc> op3 =
        OperatorFactory.get(getTestFilterDesc("0"), op4);

    // map-side work: this plan adds a second source, src2, under alias "b"
    Utilities.addMapWork(mr, src2, "b", op2);
    mr.getTagToValueDesc().add(op2.getConf().getValueSerializeInfo());

    // reduce side work: a select feeding the file sink
    Operator<fileSinkDesc> op4 = OperatorFactory.get(
        new fileSinkDesc(tmpdir + "mapredplan3.out", Utilities.defaultTd, false));

    Operator<selectDesc> op5 = OperatorFactory.get(
        new selectDesc  // constructor arguments truncated in this excerpt

    Utilities.addMapWork(mr, src, "a", op4);
    mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
    mr.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo());

    // reduce side work
    Operator<fileSinkDesc> op3 = OperatorFactory.get(
        new fileSinkDesc(tmpdir + "mapredplan4.out", Utilities.defaultTd, false));

    Operator<extractDesc> op2 = OperatorFactory.get(
        new extractDesc(getStringColumn(Utilities.ReduceField.VALUE.toString())), op3);

    Utilities.addMapWork(mr, src, "a", op4);
    mr.setKeyDesc(op0.getConf().getKeySerializeInfo());
    mr.getTagToValueDesc().add(op0.getConf().getValueSerializeInfo());

    // reduce side work
    Operator<fileSinkDesc> op3 = OperatorFactory.get(
        new fileSinkDesc(tmpdir + "mapredplan5.out", Utilities.defaultTd, false));

    Operator<extractDesc> op2 = OperatorFactory.get(
        new extractDesc(getStringColumn(Utilities.ReduceField.VALUE.toString())), op3);

    Utilities.addMapWork(mr, src, "a", op4);
    mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
    mr.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo());

    // reduce side work
    Operator<fileSinkDesc> op3 = OperatorFactory.get(
        new fileSinkDesc(tmpdir + "mapredplan6.out", Utilities.defaultTd, false));

    Operator<filterDesc> op2 =
        OperatorFactory.get(getTestFilterDesc("0"), op3);

    // describe the intermediate file's format (tt_desc feeds the sink below)
    tableDesc tt_desc =
      PlanUtils.getIntermediateFileTableDesc(
        PlanUtils.getFieldSchemasFromRowSchema(parent.getSchema(), "temporarycol"));

    // Create a file sink operator for this file name
    Operator<? extends Serializable> fs_op =
      putOpInsertMap(OperatorFactory.get(
                       new fileSinkDesc(taskTmpDir, tt_desc,
                                        parseCtx.getConf().getBoolVar(HiveConf.ConfVars.COMPRESSINTERMEDIATE)),
                       parent.getSchema()), null, parseCtx);

    // replace the reduce child with this operator
    List<Operator<? extends Serializable>> childOpList = parent.getChildOperators();
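The longer excerpt below builds a follow-up merge task: a second plan (cplan) whose input is the directory the original file sink (fsOp) wrote to, whose reducer is a plain extract over the value field, and whose terminal operator is a new file sink writing the merged result to finalName.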

    ReduceSinkOperator rsOp = (ReduceSinkOperator)OperatorFactory.getAndMakeChild(rsDesc, fsRS, ts_op);
    mapredWork cplan = GenMapRedUtils.getMapRedWork();
    ParseContext parseCtx = ctx.getParseCtx();

    Task<? extends Serializable> mergeTask = TaskFactory.get(cplan, parseCtx.getConf());
    fileSinkDesc fsConf = fsOp.getConf();
   
    // Add the extract operator to get the value fields
    RowResolver out_rwsch = new RowResolver();
    RowResolver interim_rwsch = ctx.getParseCtx().getOpParseCtx().get(fsOp).getRR();
    Integer pos = Integer.valueOf(0);
    for(ColumnInfo colInfo: interim_rwsch.getColumnInfos()) {
      String [] info = interim_rwsch.reverseLookup(colInfo.getInternalName());
      out_rwsch.put(info[0], info[1],
                    new ColumnInfo(pos.toString(), colInfo.getType(), info[0],
                          colInfo.getIsPartitionCol()));
      pos = Integer.valueOf(pos.intValue() + 1);
    }

    Operator extract =
      OperatorFactory.getAndMakeChild(
        new extractDesc(new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,
                                Utilities.ReduceField.VALUE.toString(), "", false)),
        new RowSchema(out_rwsch.getColumnInfos()));
   
    // clone the sink's table descriptor for the merged output, then drop the
    // partition-column property from the original descriptor
    tableDesc ts = (tableDesc) fsConf.getTableInfo().clone();
    fsConf.getTableInfo().getProperties().remove(
        org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_PARTITION_COLUMNS);
    FileSinkOperator newOutput =
      (FileSinkOperator)OperatorFactory.getAndMakeChild(
         new fileSinkDesc(finalName, ts,
                          parseCtx.getConf().getBoolVar(HiveConf.ConfVars.COMPRESSRESULT)),
         fsRS, extract);

    // wire up the merge plan: it reads the original sink's output directory
    // and reduces through the extract operator created above
    cplan.setReducer(extract);
    ArrayList<String> aliases = new ArrayList<String>();
    aliases.add(fsConf.getDirName());
    cplan.getPathToAliases().put(fsConf.getDirName(), aliases);
    cplan.getAliasToWork().put(fsConf.getDirName(), ts_op);
    cplan.getPathToPartitionInfo().put(fsConf.getDirName(), new partitionDesc(fsConf.getTableInfo(), null));
    cplan.setNumReduceTasks(-1);
   
    moveWork dummyMv = new moveWork(null,
        new loadFileDesc(fsOp.getConf().getDirName(), finalName, true, null, null), false);
    Task<? extends Serializable> dummyMergeTask = TaskFactory.get(dummyMv, ctx.getConf());
    List<Serializable> listWorks = new ArrayList<Serializable>();

      sel.setParentOperators(null);
     
      // Create a file sink operator for this file name
      Operator<? extends Serializable> fs_op = OperatorFactory.get(
          new fileSinkDesc(taskTmpDir, tt_desc,
                           parseCtx.getConf().getBoolVar(HiveConf.ConfVars.COMPRESSINTERMEDIATE)),
          mapJoin.getSchema());

      // splice the sink in as the map join's only child
      assert mapJoin.getChildOperators().size() == 1;
      mapJoin.getChildOperators().set(0, fs_op);

    // this temporary file is recorded in the union context and later used to
    // initialize the union plan

    // Create a file sink operator for this file name
    Operator<? extends Serializable> fs_op = OperatorFactory.get(
        new fileSinkDesc(taskTmpDir, tt_desc,
                         parseCtx.getConf().getBoolVar(HiveConf.ConfVars.COMPRESSINTERMEDIATE)),
        parent.getSchema());

    // splice the sink in as the parent's only child
    assert parent.getChildOperators().size() == 1;
    parent.getChildOperators().set(0, fs_op);
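Note the shared pattern in the three intermediate-sink excerpts (the ones built around taskTmpDir and tt_desc): the sink's format comes from PlanUtils.getIntermediateFileTableDesc, compression is governed by COMPRESSINTERMEDIATE rather than COMPRESSRESULT, and the new sink is spliced in as the sole child of the operator being cut. This is how the planner splits one operator tree into separate map-reduce tasks that hand data to each other through temporary files.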
