Package org.netbeans.gradle.model.internal

Examples of org.netbeans.gradle.model.internal.CustomSerializedMap$Deserializer
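All of the excerpts below come from the older Hive serde2 API and share one flow: look up a Deserializer implementation by name or class, initialize it with a Configuration and the table Properties, and ask it for an ObjectInspector describing the row layout. A minimal, self-contained sketch of that flow, assuming the Deserializer.initialize(Configuration, Properties) signature used throughout these examples (DeserializerSketch and rowInspectorFor are illustrative names, not part of the excerpts):

import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

// Sketch: resolve a serde by name, initialize it with the table properties,
// and obtain the row ObjectInspector.
public class DeserializerSketch {
  public static StructObjectInspector rowInspectorFor(Configuration conf,
      String serdeName, Properties tblProps) throws SerDeException {
    Deserializer deserializer = SerDeUtils.lookupDeserializer(serdeName);
    deserializer.initialize(conf, tblProps);
    return (StructObjectInspector) deserializer.getObjectInspector();
  }
}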


  /**
   * Check if the given serde is valid
   */
  private void validateSerDe(String serdeName) throws HiveException {
    try {
      Deserializer d = SerDeUtils.lookupDeserializer(serdeName);
      if(d != null) {
        System.out.println("Found class for " + serdeName);
      }
    } catch (SerDeException e) {
      throw new HiveException("Cannot validate serde: " + serdeName, e);
    }
  }


    String tableName = String.valueOf(tblProps.getProperty("name"));
    String partName = String.valueOf(partSpec);
    //HiveConf.setVar(hconf, HiveConf.ConfVars.HIVETABLENAME, tableName);
    //HiveConf.setVar(hconf, HiveConf.ConfVars.HIVEPARTITIONNAME, partName);
    Deserializer deserializer = (Deserializer) sdclass.newInstance();
    deserializer.initialize(hconf, tblProps);
    StructObjectInspector rowObjectInspector = (StructObjectInspector)deserializer.getObjectInspector();

    MapOpCtx opCtx = null;
    // Next check if this table has partitions and if so
    // get the list of partition names as well as allocate
    // the serdes for the partition columns

  public ObjectInspector getOutputObjectInspector() throws HiveException {
    try {
      ObjectInspector outInspector;
      if (work.getTblDir() != null) {
        tableDesc tbl = work.getTblDesc();
        Deserializer serde = tbl.getDeserializerClass().newInstance();
        serde.initialize(job, tbl.getProperties());
        return serde.getObjectInspector();
      }
      else {
        List<partitionDesc> listParts = work.getPartDesc();
        currPart = listParts.get(0);
        serde = currPart.getTableDesc().getDeserializerClass().newInstance();
        serde.initialize(job, currPart.getTableDesc().getProperties());
        setPrtnDesc();
        currPart = null;
        return rowObjectInspector;
      }
    } catch (Exception e) {
      // (assumed completion) surface instantiation/initialization failures
      throw new HiveException("Failed to get output object inspector", e);
    }
  }

  static public Deserializer getDeserializer(Configuration conf,
      org.apache.hadoop.hive.metastore.api.Partition part,
      org.apache.hadoop.hive.metastore.api.Table table) throws MetaException {
    String lib = part.getSd().getSerdeInfo().getSerializationLib();
    try {
      Deserializer deserializer = SerDeUtils.lookupDeserializer(lib);
      deserializer.initialize(conf, MetaStoreUtils.getSchema(part, table));
      return deserializer;
    } catch (RuntimeException e) {
      throw e;
    } catch (Exception e) {
      LOG.error("error in initSerDe: " + e.getClass().getName() + " " + e.getMessage());
      // (assumed completion) re-throw as MetaException
      throw new MetaException(e.getMessage());
    }
  }

  public Vector<StructField> getFields() {

    Vector<StructField> fields = new Vector<StructField> ();
    try {
      Deserializer decoder = getDeserializer();

      // Expand out all the columns of the table
      StructObjectInspector structObjectInspector = (StructObjectInspector)decoder.getObjectInspector();
      List<? extends StructField> fld_lst = structObjectInspector.getAllStructFieldRefs();
      for(StructField field: fld_lst) {
        fields.add(field);
      }
    } catch (SerDeException e) {
      // (assumed completion) propagate as an unchecked exception
      throw new RuntimeException(e);
    }
    return fields;
  }
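If only column names and types are needed, the StructField references exposed by the row inspector can be walked directly. A small sketch, assuming the standard ObjectInspector accessors (ColumnLister and describeColumns are illustrative names):

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

// Sketch: list "name : type" for every column exposed by a row inspector.
public final class ColumnLister {
  public static List<String> describeColumns(StructObjectInspector rowInspector) {
    List<String> cols = new ArrayList<String>();
    for (StructField field : rowInspector.getAllStructFieldRefs()) {
      cols.add(field.getFieldName() + " : "
          + field.getFieldObjectInspector().getTypeName());
    }
    return cols;
  }
}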

  /**
   * Return a deserializer object corresponding to the tableDesc
   */
  public Deserializer getDeserializer() throws Exception {
    Deserializer de = this.deserializerClass.newInstance();
    de.initialize(null, properties);
    return de;
  }

   */
  Operator genConversionSelectOperator(String dest, QB qb,
      Operator input, tableDesc table_desc) throws SemanticException {
    StructObjectInspector oi = null;
    try {
      Deserializer deserializer = table_desc.getDeserializerClass().newInstance();
      deserializer.initialize(conf, table_desc.getProperties());
      oi = (StructObjectInspector) deserializer.getObjectInspector();
    } catch (Exception e) {
      throw new SemanticException(e);
    }

    // Check column number

   *
   */
  static public Deserializer getDeserializer(Configuration conf, Properties schema) throws MetaException  {
    String lib = schema.getProperty(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_LIB);
    try {
      Deserializer deserializer = SerDeUtils.lookupDeserializer(lib);
      deserializer.initialize(conf, schema);
      return deserializer;
    } catch (Exception e) {
      LOG.error("error in initSerDe: " + e.getClass().getName() + " " + e.getMessage());
      MetaStoreUtils.printStackTrace(e);
      // (assumed completion) re-throw as MetaException
      throw new MetaException(e.getMessage());
    }
  }

   *
   */
  static public Deserializer getDeserializer(Configuration conf, org.apache.hadoop.hive.metastore.api.Table table) throws MetaException  {
    String lib = table.getSd().getSerdeInfo().getSerializationLib();
    try {
      Deserializer deserializer = SerDeUtils.lookupDeserializer(lib);
      deserializer.initialize(conf, MetaStoreUtils.getSchema(table));
      return deserializer;
    } catch (RuntimeException e) {
      throw e;
    } catch (Exception e) {
      LOG.error("error in initSerDe: " + e.getClass().getName() + " " + e.getMessage());
      // (assumed completion) re-throw as MetaException
      throw new MetaException(e.getMessage());
    }
  }

        boolean isNative = SerDeUtils.isNativeSerDe(tbl.getSd().getSerdeInfo().getSerializationLib());
        if (isNative) {
          return tbl.getSd().getCols();
        } else {
          try {
            Deserializer s = MetaStoreUtils.getDeserializer(this.hiveConf, tbl);
            return MetaStoreUtils.getFieldsFromDeserializer(tableName, s);
          } catch(SerDeException e) {
            LOG.error(StringUtils.stringifyException(e));
            throw new MetaException(e.getMessage());
          }
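The branch above captures how column metadata is resolved throughout these examples: for a native serde the columns stored in the metastore are authoritative, otherwise they are reconstructed from the deserializer. A hedged sketch of that decision, reusing only the helpers already shown in the excerpts (ColumnResolver and columnsFor are illustrative names):

import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.hive.serde2.SerDeUtils;

// Sketch: prefer metastore columns for native serdes, otherwise derive the
// schema from the deserializer, mirroring the excerpt above.
public final class ColumnResolver {
  public static List<FieldSchema> columnsFor(HiveConf conf, Table tbl) throws Exception {
    String lib = tbl.getSd().getSerdeInfo().getSerializationLib();
    if (SerDeUtils.isNativeSerDe(lib)) {
      return tbl.getSd().getCols();
    }
    Deserializer deserializer = MetaStoreUtils.getDeserializer(conf, tbl);
    return MetaStoreUtils.getFieldsFromDeserializer(tbl.getTableName(), deserializer);
  }
}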


