Package org.apache.hadoop.hive.ql.plan

Examples of org.apache.hadoop.hive.ql.plan.tableDesc
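In these excerpts, tableDesc is the query-plan descriptor that bundles a deserializer (SerDe) class, the input and output file format classes, and a java.util.Properties object describing the table (column names, column types, table name). The fragment below is a minimal sketch, not taken from the excerpts, of how such a descriptor is typically built and then read back; it assumes the usual Hive/Hadoop imports (java.util.Properties, LazySimpleSerDe, TextInputFormat, IgnoreKeyTextOutputFormat), and the column names and types are invented for illustration.

    // Build a descriptor the same way the fetchWork example below does.
    Properties prop = new Properties();
    prop.setProperty("columns", "key,value");            // invented column names
    prop.setProperty("columns.types", "string:string");  // invented column types, matching "columns"
    tableDesc td = new tableDesc(LazySimpleSerDe.class, TextInputFormat.class,
        IgnoreKeyTextOutputFormat.class, prop);

    // Execution-side code reads the descriptor back, as the other excerpts do.
    Class<?> serdeClass = td.getDeserializerClass();          // the SerDe class to instantiate
    Class<?> inputFormatClass = td.getInputFileFormatClass(); // the InputFormat to read with
    Properties tableProps = td.getProperties();               // the table's properties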


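Excerpt: reduce-side setup that takes the key tableDesc and the per-tag value tableDescs from a mapredWork, instantiates and initializes their SerDes, and collects the resulting ObjectInspectors.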
    mapredWork gWork = Utilities.getMapRedWork(job);
    reducer = gWork.getReducer();
    reducer.setMapredWork(gWork);
    isTagged = gWork.getNeedsTagging();
    try {
      tableDesc keyTableDesc = gWork.getKeyDesc();
      inputKeyDeserializer = (SerDe)ReflectionUtils.newInstance(keyTableDesc.getDeserializerClass(), null);
      inputKeyDeserializer.initialize(null, keyTableDesc.getProperties());
      keyObjectInspector = inputKeyDeserializer.getObjectInspector();
      for(int tag=0; tag<gWork.getTagToValueDesc().size(); tag++) {
        // We should initialize the SerDe with the TypeInfo when available.
        tableDesc valueTableDesc = gWork.getTagToValueDesc().get(tag);
        inputValueDeserializer[tag] = (SerDe)ReflectionUtils.newInstance(valueTableDesc.getDeserializerClass(), null);
        inputValueDeserializer[tag].initialize(null, valueTableDesc.getProperties());
        valueObjectInspector[tag] = inputValueDeserializer[tag].getObjectInspector();
       
        ArrayList<ObjectInspector> ois = new ArrayList<ObjectInspector>();
        ois.add(keyObjectInspector);
        ois.add(valueObjectInspector[tag]);


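Excerpt: constructing a tableDesc by hand (LazySimpleSerDe over text files, with column names and types carried in the Properties) to describe the result of a fetch task.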
    prop.setProperty("columns", colTypes[0]);
    prop.setProperty("columns.types", colTypes[1]);

    fetchWork fetch = new fetchWork(
      ctx.getResFile(),
      new tableDesc(LazySimpleSerDe.class, TextInputFormat.class, IgnoreKeyTextOutputFormat.class, prop),
      -1
    );   
    fetch.setSerializationNullFormat(" ");
    return TaskFactory.get(fetch, this.conf);
  }

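Excerpt: map-side initialization that looks up the partitionDesc for an input path, publishes the table name and partition spec to the job configuration, and instantiates and initializes the table's Deserializer from the tableDesc.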
          continue;
        }

        partitionDesc pd = conf.getPathToPartitionInfo().get(onefile);
        LinkedHashMap<String, String> partSpec = pd.getPartSpec();
        tableDesc td = pd.getTableDesc();
        Properties p = td.getProperties();
        // Add alias, table name, and partitions to hadoop conf
        HiveConf.setVar(hconf, HiveConf.ConfVars.HIVETABLENAME, String.valueOf(p.getProperty("name")));
        HiveConf.setVar(hconf, HiveConf.ConfVars.HIVEPARTITIONNAME, String.valueOf(partSpec));
        try {
          Class sdclass = td.getDeserializerClass();
          if(sdclass == null) {
            String className = td.getSerdeClassName();
            if (className == null || className.equals("")) {
              throw new HiveException("SerDe class or the SerDe class name is not set for table: " + td.getProperties().getProperty("name"));
            }
            sdclass = MapOperator.class.getClassLoader().loadClass(className);
          }
          deserializer = (Deserializer) sdclass.newInstance();
          deserializer.initialize(hconf, p);

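Excerpt: split generation that resolves each input directory to its tableDesc and delegates to the table's own InputFormat.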
    JobConf newjob = new JobConf(job);
    ArrayList<InputSplit> result = new ArrayList<InputSplit>();

    // for each dir, get the InputFormat, and do getSplits.
    for(Path dir: dirs) {
      tableDesc table = getTableDescFromPath(dir);
      // create a new InputFormat instance if this is the first time to see this class
      Class inputFormatClass = table.getInputFileFormatClass();
      InputFormat inputFormat = getInputFormatFromCache(inputFormatClass);

      FileInputFormat.setInputPaths(newjob, dir);
      newjob.setInputFormat(inputFormat.getClass());
      InputSplit[] iss = inputFormat.getSplits(newjob, numSplits/dirs.length);

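Excerpt: the matching validateInput pass, again routing each directory through the InputFormat named by its tableDesc.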
    }
    JobConf newjob = new JobConf(job);

    // for each dir, get the InputFormat, and do validateInput.
    for(Path dir: dirs) {
      tableDesc table = getTableDescFromPath(dir);
      // create a new InputFormat instance if this is the first time to see this class
      InputFormat inputFormat = getInputFormatFromCache(table.getInputFileFormatClass());

      FileInputFormat.setInputPaths(newjob, dir);
      newjob.setInputFormat(inputFormat.getClass());
      inputFormat.validateInput(newjob);
    }

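Excerpt: resolving a directory to its partitionDesc and tableDesc, failing with an IOException when either mapping is missing from the plan.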
    }
    if (partDesc == null) {
      throw new IOException("cannot find dir = " + dir.toString() + " in partToPartitionInfo!");
    }

    tableDesc table = partDesc.getTableDesc();
    if (table == null) {
      throw new IOException("Input " + dir.toString() +
          " does not have associated InputFormat in mapredWork!");
    }

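Excerpt: registering task temporary directories in a map-reduce plan; each directory's tableDesc is wrapped in a partitionDesc and recorded in pathToPartitionInfo.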
    assert taskTmpDirLst.size() == tt_descLst.size();
    int size = taskTmpDirLst.size();

    for (int pos = 0; pos < size; pos++) {
      String taskTmpDir = taskTmpDirLst.get(pos);
      tableDesc tt_desc = tt_descLst.get(pos);
      if (plan.getPathToAliases().get(taskTmpDir) == null) {
        plan.getPathToAliases().put(taskTmpDir, new ArrayList<String>());
        plan.getPathToAliases().get(taskTmpDir).add(taskTmpDir);
        plan.getPathToPartitionInfo().put(taskTmpDir, new partitionDesc(tt_desc, null));
        plan.getAliasToWork().put(taskTmpDir, currUnionOp);

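Excerpt: creating an intermediate file name and a binary tableDesc (derived from the parent operator's row schema) for a new file sink operator.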
    String taskTmpDir = (new Path(scratchDir + File.separator + randomid + '.' + pathid)).toString();
    pathid++;
    opProcCtx.setPathId(pathid);
   
    Operator<? extends Serializable> parent = op.getParentOperators().get(0);
    tableDesc tt_desc =
      PlanUtils.getBinaryTableDesc(PlanUtils.getFieldSchemasFromRowSchema(parent.getSchema(), "temporarycol"));
   
    // Create a file sink operator for this file name
    Operator<? extends Serializable> fs_op =
      putOpInsertMap(OperatorFactory.get

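Excerpt: deriving the result schema of a query from the fetch task's tableDesc by asking its deserializer for the field schemas.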
        sem.setFetchTaskInit(true);
        sem.getFetchTask().initialize(conf);
      }
      FetchTask ft = (FetchTask) sem.getFetchTask();

      tableDesc td = ft.getTblDesc();
      String tableName = "result";
      List<FieldSchema> lst = MetaStoreUtils.getFieldsFromDeserializer(
          tableName, td.getDeserializer());
      String schema = MetaStoreUtils.getDDLFromFieldSchema(tableName, lst);
      return schema;
    }
    return null;
  }

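Excerpt: building a binary tableDesc for the temporary file that carries a parent operator's rows into a union plan.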
    else {
      uTask = uCtxTask.getUTask();
      uPlan = (mapredWork)uTask.getWork();
    }

    tableDesc tt_desc =
      PlanUtils.getBinaryTableDesc(PlanUtils.getFieldSchemasFromRowSchema(parent.getSchema(), "temporarycol"));
   
    // generate the temporary file
    String scratchDir = ctx.getScratchDir();
    int randomid = ctx.getRandomId();
