Package org.apache.hadoop.hive.serde2

Examples of org.apache.hadoop.hive.serde2.SerDe
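Every snippet below follows the same lifecycle: obtain a SerDe implementation (directly, via ReflectionUtils.newInstance, or via SerDeUtils.lookupDeserializer), initialize it with a Configuration plus a Properties object describing the columns, then use getObjectInspector() to learn the row shape and serialize()/deserialize() to convert rows. A minimal sketch of that lifecycle, assuming the pre-0.13 serde2 API used throughout these snippets (the column names and types here are illustrative, not from any one snippet):

  import java.util.Properties;

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hive.serde.Constants;
  import org.apache.hadoop.hive.serde2.SerDe;
  import org.apache.hadoop.hive.serde2.SerDeException;
  import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
  import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

  public class SerDeLifecycleSketch {
    public static SerDe createExampleSerDe(Configuration cfg) throws SerDeException {
      SerDe serDe = new LazyBinarySerDe();

      // Column metadata travels as two comma-separated property lists.
      Properties props = new Properties();
      props.setProperty(Constants.LIST_COLUMNS, "id,name");         // illustrative
      props.setProperty(Constants.LIST_COLUMN_TYPES, "int,string"); // illustrative

      serDe.initialize(cfg, props);

      // The ObjectInspector describes the rows produced by deserialize().
      StructObjectInspector oi = (StructObjectInspector) serDe.getObjectInspector();
      System.out.println(oi.getTypeName()); // struct<id:int,name:string>
      return serDe;
    }
  }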


      boolean noOuterJoin) {
    TableDesc desc = getSpillTableDesc(alias, spillTableDesc, conf, noOuterJoin);
    if (desc == null) {
      return null;
    }
    SerDe sd = (SerDe) ReflectionUtils.newInstance(desc.getDeserializerClass(),
        null);
    try {
      sd.initialize(null, desc.getProperties());
    } catch (SerDeException e) {
      e.printStackTrace();
      return null;
    }
    return sd;
View Full Code Here


      List<ObjectInspector> structFieldObjectInspectors,
      Byte alias, int containerSize, Map<Byte, TableDesc> spillTableDesc,
      JoinDesc conf, boolean noOuterJoin) throws HiveException {

    TableDesc tblDesc = JoinUtil.getSpillTableDesc(alias, spillTableDesc, conf, noOuterJoin);
    SerDe serde = JoinUtil.getSpillSerDe(alias, spillTableDesc, conf,
        noOuterJoin);

    if (serde == null) {
      containerSize = -1;
    }
View Full Code Here

      cTypes.append(fields.get(i).getTypeName());
    }

    try
    {
      SerDe serDe = new LazyBinarySerDe();
      Properties p = new Properties();
      p.setProperty(org.apache.hadoop.hive.serde.Constants.LIST_COLUMNS,
          cNames.toString());
      p.setProperty(
          org.apache.hadoop.hive.serde.Constants.LIST_COLUMN_TYPES,
          cTypes.toString());
      serDe.initialize(cfg, p);
      return serDe;
    }
    catch (SerDeException se)
    {
      throw new WindowingException(se);
View Full Code Here
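The top of the snippet above is cut off; judging from the loop body, it builds the cNames/cTypes strings from the output row's fields. A hedged reconstruction of that elided portion, assuming the fields come from a StructTypeInfo (the method and variable names are guesses, not the original code):

  import java.util.List;

  import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
  import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
  import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
  import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

  public class ColumnListSketch
  {
    // Derive the comma-separated column-name and column-type strings
    // from a struct ObjectInspector, as the LIST_COLUMNS and
    // LIST_COLUMN_TYPES properties above expect.
    public static String[] columnLists(StructObjectInspector oi)
    {
      StructTypeInfo rowType = (StructTypeInfo) TypeInfoUtils
          .getTypeInfoFromObjectInspector(oi);
      List<String> names = rowType.getAllStructFieldNames();
      List<TypeInfo> fields = rowType.getAllStructFieldTypeInfos();
      StringBuilder cNames = new StringBuilder();
      StringBuilder cTypes = new StringBuilder();
      for (int i = 0; i < names.size(); i++)
      {
        if (i > 0)
        {
          cNames.append(",");
          cTypes.append(",");
        }
        cNames.append(names.get(i));
        cTypes.append(fields.get(i).getTypeName());
      }
      return new String[] { cNames.toString(), cTypes.toString() };
    }
  }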

      TableFunctionEvaluator tEval) throws WindowingException
  {
    /*
     * setup the SerDe.
     */
    SerDe serde = null;
    // treat Noop Function special because it just hands the input Partition
    // to the next function in the chain.
    if (tDef.getName().equals(FunctionRegistry.NOOP_TABLE_FUNCTION)
        || tDef.getName().equals(
            FunctionRegistry.NOOP_MAP_TABLE_FUNCTION))
    {
      serde = inputDef.getSerde();
    }
    else
    {
      serde = TranslateUtils.createLazyBinarySerDe(tInfo.getHiveCfg(),
          tEval.getOutputOI());
    }
    tDef.setSerde(serde);

    try
    {
      tDef.setOI((StructObjectInspector) serde.getObjectInspector());
    }
    catch (SerDeException se)
    {
      throw new WindowingException(se);
    }

    if (tEval.isTransformsRawInput())
    {
      if (tDef.getName().equals(FunctionRegistry.NOOP_MAP_TABLE_FUNCTION))
      {
        serde = inputDef.getSerde();
      }
      else
      {
        serde = TranslateUtils.createLazyBinarySerDe(
            tInfo.getHiveCfg(), tEval.getRawInputOI());
      }
      try
      {
        tDef.setMapOI((StructObjectInspector) serde
            .getObjectInspector());
      }
      catch (SerDeException se)
      {
        throw new WindowingException(se);
View Full Code Here

      throws WindowingException
  {
    String serDeClassName = oDef.getSpec().getSerDeClass();
    Properties serDeProps = oDef.getSpec().getSerDeProps();
    Class<? extends SerDe> serDeClass;
    SerDe serde;
   
    try
    {
      serDeClass = (Class<? extends SerDe>) Class.forName(serDeClassName);
      serde = serDeClass.newInstance();
    }
    catch(Exception e)
    {
      throw new WindowingException("Internal error, initializing output SerDe", e);
    }
    StringBuilder colNames = new StringBuilder();
    StringBuilder colTypes = new StringBuilder();
    boolean first = true;
   
    for(ColumnDef cDef : selectDef.getColumns())
    {
      if (!first)
      {
        colNames.append(",");
        colTypes.append(",");
      }
      else
        first = false;
      colNames.append(cDef.getAlias());
      colTypes.append(TypeInfoUtils.getTypeInfoFromObjectInspector(cDef.getOI()).getTypeName());
    }
   
    serDeProps.setProperty(org.apache.hadoop.hive.serde.Constants.LIST_COLUMNS, colNames.toString());
    serDeProps.setProperty(org.apache.hadoop.hive.serde.Constants.LIST_COLUMN_TYPES, colTypes.toString());
    try
    {
      serde.initialize(hCfg, serDeProps);
    }
    catch(SerDeException se)
    {
      throw new WindowingException("Failed to initialize output SerDe", se);
    }
View Full Code Here

      serDeProps.setProperty(serdeName, serdePropsMap.get(serdeName));
    }

    try
    {
      SerDe serDe = (SerDe) SerDeUtils.lookupDeserializer(serDeClassName);
      serDe.initialize(hConf, serDeProps);
      hiveTable.setSerde(serDe);
      hiveTable.setOI((StructObjectInspector) serDe.getObjectInspector());
    }
    catch (SerDeException se)
    {
      throw new WindowingException(se);
    }
View Full Code Here

    String serDeClassName = output.getOutputSpec().getSerDeClass();
    Properties serDeProps = output.getOutputSpec().getSerDeProps();

    try
    {
      SerDe serDe = (SerDe) SerDeUtils.lookupDeserializer(serDeClassName);
      serDe.initialize(hConf, serDeProps);
      output.setSerDe(serDe);
    }
    catch (SerDeException se)
    {
      throw new WindowingException(se);
View Full Code Here

  public void printQueryOutput(QueryDef qry, HiveConf cfg) throws WindowingException
  {
    try
    {
      JobConf jCfg = new JobConf(cfg);
      SerDe outSerDe = setupOutputSerDe(qry, jCfg);
      RowSchema rSchema = getQueryOutputRowSchema(qry, jCfg);
      TableDesc tDesc = setupTableDesc(rSchema);

      tDesc.setDeserializerClass(qry.getOutput().getSerDe().getClass());
      String outputFormatClassName = qry.getOutput().getSpec()
          .getOutputFormatClass();
      Class<? extends OutputFormat> outputFormatClass = (outputFormatClassName != null)
          ? (Class<? extends OutputFormat>) Class.forName(outputFormatClassName)
          : SequenceFileOutputFormat.class;
      // TODO: this is a hack; check how this is done in Hive
      tDesc.setInputFileFormatClass(mapToInputFormat(outputFormatClass));

      tDesc.setProperties(qry.getOutput().getSpec().getSerDeProps());

      FetchOperator ftOp = setupFetchOperator(qry, tDesc, jCfg);

      while (true)
      {
        InspectableObject io = ftOp.getNextRow();
        if (io == null)
        {
          return;
        }

        String s = ((Text) outSerDe.serialize(io.o, io.oi)).toString();
        printOutput(s);
      }
    }
    catch (WindowingException we)
    {
View Full Code Here
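The fetch loop above leans on the Serializer half of the interface: serialize(Object, ObjectInspector) returns the row in the SerDe's wire format, and the cast to Text assumes a delimited/text-style output SerDe. A small sketch of that contract in isolation, with an already-initialized SerDe (the column layout and row values are illustrative assumptions):

  import java.util.Arrays;
  import java.util.List;

  import org.apache.hadoop.hive.serde2.SerDe;
  import org.apache.hadoop.hive.serde2.SerDeException;
  import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
  import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
  import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
  import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
  import org.apache.hadoop.io.Writable;

  public class SerializeSketch {
    // Serialize one row with an already-initialized SerDe. Rows are plain
    // Lists of column values, described by a standard struct inspector.
    public static Writable serializeRow(SerDe serDe) throws SerDeException {
      List<String> colNames = Arrays.asList("id", "name");       // illustrative
      List<ObjectInspector> colOIs = Arrays.asList(
          (ObjectInspector) PrimitiveObjectInspectorFactory.javaIntObjectInspector,
          PrimitiveObjectInspectorFactory.javaStringObjectInspector);
      StructObjectInspector rowOI =
          ObjectInspectorFactory.getStandardStructObjectInspector(colNames, colOIs);

      Object row = Arrays.asList(1, "alice");                    // illustrative
      return serDe.serialize(row, rowOI); // a Text for delimited-style SerDes
    }
  }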

     
      /* from FetchWork initialize */
      String serdeName = HiveConf.getVar(jcfg, HiveConf.ConfVars.HIVEFETCHOUTPUTSERDE);
      Class<? extends SerDe> serdeClass = Class.forName(serdeName, true,
          JavaUtils.getClassLoader()).asSubclass(SerDe.class);

      SerDe serDe = (SerDe) ReflectionUtils.newInstance(serdeClass, null);

      Properties serdeProp = new Properties();

      // this is the default serialization format: tab-separated columns
      // (Utilities.tabCode is the code of '\t'), with nulls rendered as "NULL"
      if (serDe instanceof DelimitedJSONSerDe) {
        serdeProp.put(Constants.SERIALIZATION_FORMAT, "" + Utilities.tabCode);
        serdeProp.put(Constants.SERIALIZATION_NULL_FORMAT, "NULL");
      }

      serDe.initialize(jcfg, serdeProp);
      /* end: from FetchWork initialize */

      return serDe;
    }
    catch(Exception e)
View Full Code Here

  public static Partition createPartition(String partitionClass,
      int partitionMemSize, WindowingInput wIn) throws WindowingException
  {
    try
    {
      SerDe serDe = (SerDe) wIn.getDeserializer();
      StructObjectInspector oI = (StructObjectInspector) serDe
          .getObjectInspector();
      Partition p = new Partition(partitionClass, partitionMemSize,
          serDe, oI);
      Writable w = wIn.createRow();
      while (wIn.next(w) != -1)
View Full Code Here
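The loop body is cut off above; from the surrounding API it evidently pumps each row into the Partition. A hedged completion of the loop (Partition.append is an assumption based on the SQLWindowing sources, not verified here):

      while (wIn.next(w) != -1)
      {
        p.append(w);         // assumed Partition API
        w = wIn.createRow(); // fresh Writable for the next record
      }
      return p;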
