Package com.sap.hadoop.windowing.query2.translate

Examples of com.sap.hadoop.windowing.query2.translate.WindowFunctionTranslation


  }
 
  public static void validateWindowDefForWFn(TableFuncDef tFnDef, WindowFunctionDef wFnDef)
    throws WindowingException
  {
    WindowDef tWindow = tFnDef.getWindow();
    WindowDef fWindow = wFnDef.getWindow();
   
    PartitionDef tPart = tWindow == null ? null : tWindow.getPartDef();
    PartitionDef fPart = fWindow == null ? null : fWindow.getPartDef();
   
    if ( !TranslateUtils.isCompatible(tPart, fPart))
    {
      throw new WindowingException(
          sprintf("Window Function '%s' has an incompatible partition clause", wFnDef.getSpec()));
    }
   
    OrderDef tOrder = tWindow == null ? null : tWindow.getOrderDef();
    OrderDef fOrder = fWindow == null ? null : fWindow.getOrderDef();
    if ( !TranslateUtils.isCompatible(tOrder, fOrder))
    {
      throw new WindowingException(
          sprintf("Window Function '%s' has an incompatible order clause", wFnDef.getSpec()));
    }
  }
View Full Code Here
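
The validation above treats a window function's clause as valid only if TranslateUtils.isCompatible accepts it against the enclosing table function's clause. The following is a minimal, self-contained sketch of one plausible reading of that null-tolerant check (the class, method, and the rule that a missing function-level clause simply inherits the table function's clause are assumptions here, not the project's API):

import java.util.Arrays;
import java.util.List;
import java.util.Objects;

// Illustrative only: a null-tolerant compatibility check in the spirit of
// TranslateUtils.isCompatible. The exact rule in the project may differ.
class CompatibilitySketch
{
  static boolean isCompatible(List<String> tableFnClause, List<String> fnClause)
  {
    if (fnClause == null) return true;            // assumed: function inherits the table fn clause
    if (tableFnClause == null) return false;      // assumed: function adds a clause the table fn lacks
    return Objects.equals(tableFnClause, fnClause); // otherwise the clauses must match
  }

  public static void main(String[] args)
  {
    System.out.println(isCompatible(Arrays.asList("p_mfgr"), null));                    // true
    System.out.println(isCompatible(Arrays.asList("p_mfgr"), Arrays.asList("p_mfgr"))); // true
    System.out.println(isCompatible(Arrays.asList("p_mfgr"), Arrays.asList("p_name"))); // false
  }
}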


        " into path='/tmp/wout2'" +
        " serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'" +
        " with serdeproperties('field.delim'=',')" +
        " format 'org.apache.hadoop.mapred.TextOutputFormat'");
   
    RangeBoundaryDef rBdef = (RangeBoundaryDef) qDef.getSelectList().getWindowFuncs().get(0).getWindow().getWindow().getStart();
    assert rBdef.getAmt() == 2;
    rBdef = (RangeBoundaryDef) qDef.getSelectList().getWindowFuncs().get(0).getWindow().getWindow().getEnd();
    assert rBdef.getAmt() == 2;
   
  }
View Full Code Here
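
Note that the boundary checks in the test above use Java assert statements, which are silently skipped unless the JVM runs with assertions enabled. A tiny standalone illustration (the class name and values are made up):

// Illustrative only: Java asserts are no-ops by default.
// Run with "java -ea BoundaryAssertDemo" to actually enforce the check.
public class BoundaryAssertDemo
{
  public static void main(String[] args)
  {
    int amt = 2; // stands in for rBdef.getAmt() from the test above
    assert amt == 2 : "unexpected window boundary amount: " + amt;
    System.out.println("boundary amount = " + amt);
  }
}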

  public static WindowFunctionDef translate(QueryDef qDef, TableFuncDef windowTableFnDef, WindowFunctionSpec wFnSpec) throws WindowingException
  {
    QueryTranslationInfo tInfo = qDef.getTranslationInfo();
    InputInfo iInfo = tInfo.getInputInfo(windowTableFnDef.getInput());

    WindowFunctionDef wFnDef = new WindowFunctionDef();
    wFnDef.setSpec(wFnSpec);
   
    /*
     * translate args
     */
    ArrayList<ASTNode> args = wFnSpec.getArgs();
    if ( args != null)
    {
      for(ASTNode expr : args)
      {
        ArgDef argDef = translateWindowFunctionArg(qDef, windowTableFnDef, iInfo,  expr);
        wFnDef.addArg(argDef);
      }
    }
   
    if ( RANKING_FUNCS.contains(wFnSpec.getName()))
    {
      setupRankingArgs(qDef, windowTableFnDef, wFnDef, wFnSpec);
    }
   
    WindowDef wDef = translateWindowSpec(qDef, iInfo, wFnSpec);
    wFnDef.setWindow(wDef);
    validateWindowDefForWFn(windowTableFnDef, wFnDef);
   
    setupEvaluator(wFnDef);
   
    return wFnDef;
  }
View Full Code Here
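
The translate method above branches on whether the function name appears in RANKING_FUNCS before setting up the evaluator. Below is a self-contained sketch of that kind of name-based dispatch; the set contents and the behavior described in the print statements are assumptions, not a verbatim copy of the project's RANKING_FUNCS or setupRankingArgs:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

// Illustrative only: dispatch on a set of ranking function names, as the
// RANKING_FUNCS check in translate() does. The exact names are assumed.
class RankingDispatchSketch
{
  static final Set<String> RANKING_FUNCS =
      new HashSet<>(Arrays.asList("rank", "denserank", "cumedist", "percentrank"));

  static void translateFunction(String fnName)
  {
    if (RANKING_FUNCS.contains(fnName))
    {
      System.out.println(fnName + ": ranking function, arguments derived from the order clause");
    }
    else
    {
      System.out.println(fnName + ": translate the explicit argument list");
    }
  }

  public static void main(String[] args)
  {
    translateFunction("rank");
    translateFunction("sum");
  }
}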

    HiveQueryDef def = new HiveQueryDef();
    HiveQueryExecutor hiveQryExec = qDef.getTranslationInfo().getHiveQueryExecutor();
    Hive hive = qDef.getTranslationInfo().getHive();
   
    String tableName = hiveQryExec.createTableAsQuery(spec.getHiveQuery());
    HiveTableSpec tSpec = new HiveTableSpec();
    tSpec.setDbName(hive.getCurrentDatabase());
    tSpec.setTableName(tableName);
    tSpec.setPartition(spec.getPartition());
    tSpec.setOrder(spec.getOrder());
    def = (HiveQueryDef) InputTranslation.translate(qDef, tSpec, (HiveTableDef) def);
    return def;
  }
View Full Code Here

  }
 
  public void execute(QueryDef qDef, WindowingShell wShell) throws WindowingException
  {
    QueryTranslationInfo tInfo = qDef.getTranslationInfo();
    HiveTableSpec hvTblSpec = qDef.getInput().getHiveTableSpec();
   
    WindowingInput wIn = IOUtils.createTableWindowingInput(hvTblSpec.getDbName(), hvTblSpec.getTableName(), tInfo.getHiveCfg());
    //Partition p = IOUtils.createPartition(partClassName, partMemSize, wIn);
   
    PartitionsIterator partsItr = new PartitionsIterator(wIn, qDef);
   
    while(partsItr.hasNext())
View Full Code Here

  }
 
  public OrderColumnDef(ColumnDef cDef)
  {
    super(cDef);
    this.spec = new OrderColumnSpec(spec);
  }
View Full Code Here

    this.spec = spec;
  }
 
  public OrderDef(PartitionDef pDef)
  {
    this.spec = new OrderSpec(pDef.getSpec());
    for(ColumnDef cDef : pDef.getColumns())
    {
      addColumn(new OrderColumnDef(cDef));
    }
  }
View Full Code Here
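
The OrderDef(PartitionDef) constructor above builds an order clause out of the partition columns, presumably so a missing order clause can default to the partition columns. A plain-Java sketch of that defaulting rule, using strings in place of the project's ColumnDef/OrderColumnDef types:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Illustrative only: fall back to ordering by the partition columns when no
// explicit order clause is given, mirroring the OrderDef(PartitionDef) constructor.
class DefaultOrderSketch
{
  static List<String> defaultOrder(List<String> partitionColumns, List<String> orderColumns)
  {
    if (orderColumns != null && !orderColumns.isEmpty())
    {
      return orderColumns;
    }
    return new ArrayList<>(partitionColumns);
  }

  public static void main(String[] args)
  {
    System.out.println(defaultOrder(Arrays.asList("p_mfgr"), null));                    // [p_mfgr]
    System.out.println(defaultOrder(Arrays.asList("p_mfgr"), Arrays.asList("p_name"))); // [p_name]
  }
}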

    }

    @Override
    public QueryInputSpec next()
    {
      QueryInputSpec curr = nextInput;
      if ( curr instanceof TableFuncSpec)
      {
        TableFuncSpec tFunc = (TableFuncSpec) curr;
        nextInput = tFunc.getInput();
      }
View Full Code Here

    @Override
    public boolean hasNext()
    {
      if ( qSpecIt.hasNext())
      {
        QueryInputSpec iSpec = qSpecIt.next();
        if ( iSpec instanceof TableFuncSpec)
        {
          nextInput = (TableFuncSpec) iSpec;
          return true;
        }
View Full Code Here
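
The hasNext/next pair above implements a look-ahead iterator that walks the chain of query inputs and only surfaces TableFuncSpec nodes. Below is a self-contained sketch of the same look-ahead filtering pattern over plain strings; the class name and the "tf:" prefix convention are made up for illustration:

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;

// Illustrative only: a look-ahead iterator that filters an underlying iterator,
// in the style of the TableFuncSpec-only iteration above.
class LookAheadFilterSketch implements Iterator<String>
{
  private final Iterator<String> underlying;
  private String nextMatch;

  LookAheadFilterSketch(Iterator<String> underlying)
  {
    this.underlying = underlying;
  }

  @Override
  public boolean hasNext()
  {
    while (nextMatch == null && underlying.hasNext())
    {
      String candidate = underlying.next();
      if (candidate.startsWith("tf:")) // stand-in for "instanceof TableFuncSpec"
      {
        nextMatch = candidate;
      }
    }
    return nextMatch != null;
  }

  @Override
  public String next()
  {
    if (!hasNext()) throw new NoSuchElementException();
    String curr = nextMatch;
    nextMatch = null;
    return curr;
  }

  public static void main(String[] args)
  {
    List<String> specs = Arrays.asList("tf:noop", "hiveTable:part", "tf:windowing");
    Iterator<String> it = new LookAheadFilterSketch(specs.iterator());
    while (it.hasNext()) System.out.println(it.next());
  }
}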

  }
 
  public static void validateOutputSpec(QueryDef qDef) throws WindowingException
  {
    QueryTranslationInfo tInfo = qDef.getTranslationInfo();
    QueryOutputSpec spec = qDef.getSpec().getOutput();
   
    // ensure outputPath is specified. It is optional in grammar because it is not required in Hive mode.
    if ( spec == null || spec.getPath() == null )
    {
      throw new WindowingException("Query doesn't contain an output Path for results");
    }
   
    // if tableName is specified; validate it exists
    Table oTbl = null;
    if ( spec.getHiveTable() != null )
    {
      oTbl = getHiveTableDetails(tInfo.getHiveCfg(), spec.getHiveTable(), qDef.getInput().getHiveTableSpec());
    }
   
    // validate serDeClass
    if ( spec.getSerDeClass() == null )
    {
      if ( oTbl != null && oTbl.getSd().getSerdeInfo().isSetSerializationLib() )
      {
        spec.setSerDeClass(oTbl.getSd().getSerdeInfo().getSerializationLib());
        if ( oTbl.getSd().getSerdeInfo().isSetParameters() )
        {
          Iterator<Map.Entry<String, String>> props = oTbl.getSd().getSerdeInfo().getParameters().entrySet().iterator();
          while(props.hasNext())
          {
            Map.Entry<String, String> e = props.next();
            spec.addSerdeProperty(e.getKey(), e.getValue());
          }
        }
      }
      else
      {
        spec.setSerDeClass(com.sap.hadoop.windowing.Constants.DEFAULT_SERDE_CLASSNAME);
        spec.addSerdeProperty(org.apache.hadoop.hive.serde.Constants.FIELD_DELIM, ",");
      }
    }
   
    try
    {
      Class.forName(spec.getSerDeClass());
    }
    catch(Throwable t)
    {
      throw new WindowingException(sprintf("Unknown SerDe Class %s", spec.getSerDeClass()), t);
    }
   
    // validate outputFormat
    if ( spec.getOutputFormatClass() == null )
    {
      if ( oTbl != null )
      {
        spec.setOutputFormatClass(oTbl.getSd().getOutputFormat());
      }
      else
      {
        spec.setOutputFormatClass(Constants.DEFAULT_OUTPUTFORMAT_CLASSNAME);
      }
    }
    try
    {
      Class.forName(spec.getOutputFormatClass());
    }
    catch(Throwable t)
    {
      throw new WindowingException(
        sprintf("Unknown OutputFormat Class %s", spec.getOutputFormatClass()), t);
    }
   
    // ensure the user has not specified a RecordWriter class; it is not valid in MR mode
    if ( spec.getRecordWriterClass() != null )
    {
      throw new WindowingException("Illegal Output Spec: RecordWriter class not valid in MR mode");
    }
   
  }
View Full Code Here
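
validateOutputSpec resolves both the SerDe and the OutputFormat class with Class.forName, falling back to a default when none is given and converting an unresolvable name into a WindowingException. The following standalone sketch shows that resolve-or-default pattern; DEFAULT_CLASS and the use of IllegalArgumentException stand in for the project's constants and WindowingException:

// Illustrative only: resolve a user-supplied class name, fall back to a default,
// and turn an unresolvable name into a descriptive exception, mirroring the
// SerDe/OutputFormat handling in validateOutputSpec.
class ClassResolutionSketch
{
  static final String DEFAULT_CLASS = "java.lang.String"; // stand-in for the project's default class name

  static Class<?> resolveOrDefault(String className)
  {
    String name = (className == null) ? DEFAULT_CLASS : className;
    try
    {
      return Class.forName(name);
    }
    catch (Throwable t)
    {
      throw new IllegalArgumentException(String.format("Unknown Class %s", name), t);
    }
  }

  public static void main(String[] args)
  {
    System.out.println(resolveOrDefault(null).getName());               // java.lang.String
    System.out.println(resolveOrDefault("java.util.ArrayList").getName());
    try { resolveOrDefault("no.such.Clazz"); }
    catch (IllegalArgumentException e) { System.out.println(e.getMessage()); }
  }
}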
