Package com.sap.hadoop.windowing

Examples of com.sap.hadoop.windowing.WindowingException
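For reference, the fragments below exercise three constructor shapes: wrapping a cause, a message only, and a message plus a cause. A minimal sketch of such an exception class, assuming it is a plain checked exception (the real class may carry additional members), looks like this:

package com.sap.hadoop.windowing;

// Minimal sketch inferred from the call sites on this page; only these
// three constructors are needed to cover every usage shown below.
public class WindowingException extends Exception
{
  private static final long serialVersionUID = 1L;

  public WindowingException(String message)
  {
    super(message);
  }

  public WindowingException(Throwable cause)
  {
    super(cause);
  }

  public WindowingException(String message, Throwable cause)
  {
    super(message, cause);
  }
}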


    {
      append(serDe.serialize(o, OI));
    }
    catch(Exception e)
    {
      throw new WindowingException(e);
    }
  }
View Full Code Here


    {
      ObjectInspectorConverters.getConverter(oi, PrimitiveObjectInspectorFactory.javaBooleanObjectInspector);
    }
    catch(Throwable t)
    {
      throw new WindowingException("Where Expr must be convertible to a boolean value", t);
    }
   
    whDef.setExprNode(exprNode);
    whDef.setExprEvaluator(exprEval);
    whDef.setOI(oi);
View Full Code Here

    case DOUBLE:
      return DOUBLE_COMPARATOR;
    case STRING:
      return STRING_COMPARATOR;
    default:
      throw new WindowingException("Unsupported primitive type: " + pC);
    }
  }
View Full Code Here
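The fragment above is the tail of a switch over Hive primitive categories. A hedged sketch of the surrounding lookup method follows; the class name, method name, and the placeholder comparator constants are assumptions, and only the DOUBLE, STRING, and default branches come from the original code.

import java.util.Comparator;

import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;

import com.sap.hadoop.windowing.WindowingException;

class ComparatorLookupSketch
{
  // Placeholder comparators so the sketch is self-contained; the real
  // constants are defined elsewhere in the windowing code base.
  static final Comparator<Object> DOUBLE_COMPARATOR =
      (a, b) -> Double.compare(((Number) a).doubleValue(), ((Number) b).doubleValue());
  static final Comparator<Object> STRING_COMPARATOR =
      (a, b) -> a.toString().compareTo(b.toString());

  // Unsupported categories surface as a WindowingException, as in the fragment above.
  static Comparator<Object> getComparator(PrimitiveCategory pC) throws WindowingException
  {
    switch (pC)
    {
    case DOUBLE:
      return DOUBLE_COMPARATOR;
    case STRING:
      return STRING_COMPARATOR;
    default:
      throw new WindowingException("Unsupported primitive type: " + pC);
    }
  }
}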

    QueryOutputSpec spec = qDef.getSpec().getOutput();
   
    // ensure outputPath is specified. It is optional in grammar because it is not required in Hive mode.
    if ( spec == null || spec.getPath() == null )
    {
      throw new WindowingException("Query doesn't contain an output Path for results");
    }
   
    // if a Hive table name is specified, validate that the table exists
    Table oTbl = null;
    if ( spec.getHiveTable() != null )
    {
      oTbl = getHiveTableDetails(tInfo.getHiveCfg(), spec.getHiveTable(), qDef.getInput().getHiveTableSpec());
    }
   
    // validate serDeClass
    if ( spec.getSerDeClass() == null )
    {
      if ( oTbl != null && oTbl.getSd().getSerdeInfo().isSetSerializationLib() )
      {
        spec.setSerDeClass(oTbl.getSd().getSerdeInfo().getSerializationLib());
        if ( oTbl.getSd().getSerdeInfo().isSetParameters() )
        {
          Iterator<Map.Entry<String, String>> props = oTbl.getSd().getSerdeInfo().getParameters().entrySet().iterator();
          while(props.hasNext())
          {
            Map.Entry<String, String> e = props.next();
            spec.addSerdeProperty(e.getKey(), e.getValue());
          }
        }
      }
      else
      {
        spec.setSerDeClass(com.sap.hadoop.windowing.Constants.DEFAULT_SERDE_CLASSNAME);
        spec.addSerdeProperty(org.apache.hadoop.hive.serde.Constants.FIELD_DELIM, ",");
      }
    }
   
    try
    {
      Class.forName(spec.getSerDeClass());
    }
    catch(Throwable t)
    {
      throw new WindowingException(sprintf("Unknown SerDe Class %s", spec.getSerDeClass()), t);
    }
   
    // validate outputFormat
    if ( spec.getOutputFormatClass() == null )
    {
      if ( oTbl != null )
      {
        spec.setOutputFormatClass(oTbl.getSd().getOutputFormat());
      }
      else
      {
        spec.setOutputFormatClass(Constants.DEFAULT_OUTPUTFORMAT_CLASSNAME);
      }
    }
    try
    {
      Class.forName(spec.getOutputFormatClass());
    }
    catch(Throwable t)
    {
      throw new WindowingException(
        sprintf("Unknown OutputFormat Class %s", spec.getOutputFormatClass()), t);
    }
   
    // ensure the user has not specified a RecordWriter class; it is not valid in MR mode
    if ( spec.getRecordWriterClass() != null )
    {
      throw new WindowingException("Illegal Output Spec: RecordWriter class not valid in MR mode");
    }
   
  }
View Full Code Here

      db = db == null ? Hive.get(cfg).getCurrentDatabase() : db;
      return HiveUtils.getTable(db, tableName, cfg);
    }
    catch(HiveException he)
    {
      throw new WindowingException(he);
    }
  }
View Full Code Here

      serDeClass = (Class<? extends SerDe>) Class.forName(serDeClassName);
      serde = serDeClass.newInstance();
    }
    catch(Exception e)
    {
      throw new WindowingException("Internal error, initializing output SerDe", e);
    }
    StringBuilder colNames = new StringBuilder();
    StringBuilder colTypes = new StringBuilder();
    boolean first = true;
   
    for(ColumnDef cDef : selectDef.getColumns())
    {
      if (!first)
      {
        colNames.append(",");
        colTypes.append(",");
      }
      else
        first = false;
      colNames.append(cDef.getAlias());
      colTypes.append(TypeInfoUtils.getTypeInfoFromObjectInspector(cDef.getOI()).getTypeName());
    }
   
    serDeProps.setProperty(org.apache.hadoop.hive.serde.Constants.LIST_COLUMNS, colNames.toString());
    serDeProps.setProperty(org.apache.hadoop.hive.serde.Constants.LIST_COLUMN_TYPES, colTypes.toString());
    try
    {
      serde.initialize(hCfg, serDeProps);
    }
    catch(SerDeException se)
    {
      throw new WindowingException("Failed to initialize output SerDe", se);
    }
   
    oDef.setSerDe(serde);
   
  }
View Full Code Here

    /*
     * validate that the input chain ends in a Hive Query or Table.
     */
    if ( !spec.getInput().sourcedFromHive() )
    {
      throw new WindowingException("Translation not supported for HdfsLocation based queries");
    }
   
    EnsureTableFunctionInQuery.execute(qDef);
    SlidePartitionAndOrderSpecs.execute(qDef);
    TranslateInputSpecs.execute(qDef);
View Full Code Here

    }
    else if ( inputDef instanceof TableFuncDef)
    {
      return "ptf_" + inputNum;
    }
    throw new WindowingException(sprintf("Internal Error: attempt to translate %s", inputDef.getSpec()));
  }
View Full Code Here

    {
      throw we;
    }
    catch (Exception he)
    {
      throw new WindowingException(he);
    }

    return def;
  }
View Full Code Here
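The fragment above begins mid-try, so the overall shape is easy to miss: a WindowingException thrown by the translation step is rethrown as-is, while any other failure is wrapped exactly once. A hedged sketch of that idiom, with an invented doTranslate() helper standing in for the real work:

import com.sap.hadoop.windowing.WindowingException;

public class TranslateSketch
{
  public static Object translate() throws WindowingException
  {
    Object def;
    try
    {
      def = doTranslate();
    }
    catch (WindowingException we)
    {
      // already the windowing layer's exception: propagate unchanged
      throw we;
    }
    catch (Exception he)
    {
      // anything else (a HiveException, for example) gets wrapped once
      throw new WindowingException(he);
    }

    return def;
  }

  private static Object doTranslate() throws Exception
  {
    // placeholder so the sketch compiles; the real code builds the query definition here
    return new Object();
  }
}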

    QueryTranslationInfo tInfo = qDef.getTranslationInfo();
   
    TableFunctionResolver tFn = FunctionRegistry.getTableFunctionResolver(tSpec.getName());
    if ( tFn == null)
    {
      throw new WindowingException(sprintf("Unknown Table Function %s", tSpec.getName()));
    }
   
    TableFuncDef tDef = new TableFuncDef();
    tDef.setSpec(tSpec);
    tDef.setInput(inputDef);
View Full Code Here
