Package org.pentaho.di.trans

Examples of org.pentaho.di.trans.Trans
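
All of the examples below share the same basic Trans lifecycle: load or build a TransMeta, wrap it in a Trans, prepare the execution, start the step threads, wait for them to finish, and clean up. A minimal standalone sketch of that lifecycle, assuming the KettleEnvironment.init() bootstrap and a hypothetical my-transformation.ktr file path:

import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;

public class RunTransExample {
  public static void main( String[] args ) throws Exception {
    // Initialize the Kettle environment (plugins, logging) before touching the API.
    KettleEnvironment.init();

    // Parse the transformation definition from a .ktr file (hypothetical path).
    TransMeta transMeta = new TransMeta( "my-transformation.ktr" );

    // Wrap the metadata in an executable transformation and run it to completion.
    Trans trans = new Trans( transMeta );
    trans.prepareExecution( null );   // no command-line style arguments
    trans.startThreads();             // each step runs in its own thread
    trans.waitUntilFinished();        // block until all steps have finished
    trans.cleanup();

    if ( trans.getErrors() > 0 ) {
      throw new RuntimeException( "Transformation finished with errors" );
    }
  }
}

The excerpts below build on this same pattern, adding repository loading, parameter mapping, row listeners, and error handling.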


      final Repository repository = connectToRepository(logWriter);
      try
      {
        final TransMeta transMeta = loadTransformation(repository, resourceManager, resourceKey);
        transMeta.setArguments(params);
        final Trans trans = new Trans(transMeta);
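        // Map values from the parameters row (by source name) onto the transformation's named parameters (by alias).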
        for (int i = 0; i < definedVariableNames.length; i++)
        {
          final ParameterMapping mapping = definedVariableNames[i];
          final String sourceName = mapping.getName();
          final String variableName = mapping.getAlias();
          final Object value = parameters.get(sourceName);
          if (value != null)
          {
            trans.setParameterValue(variableName, String.valueOf(value));
          }
        }

        transMeta.setInternalKettleVariables();
        trans.prepareExecution(transMeta.getArguments());

        // Find the step whose output feeds the report and attach a TableProducer as a row listener on it.
        TableProducer tableProducer = null;
        final List stepList = trans.getSteps();
        for (int i = 0; i < stepList.size(); i++)
        {
          final StepMetaDataCombi metaDataCombi = (StepMetaDataCombi) stepList.get(i);
          if (!stepName.equals(metaDataCombi.stepname))
          {
            continue;
          }
          final RowMetaInterface row = transMeta.getStepFields(stepName);
          tableProducer = new TableProducer(row, queryLimit, stopOnError);
          metaDataCombi.step.addRowListener(tableProducer);
          break;
        }

        if (tableProducer == null)
        {
          throw new ReportDataFactoryException("Cannot find the specified transformation step " + stepName);
        }

        currentlyRunningTransformation = trans;
        trans.startThreads();
        trans.waitUntilFinished();
        trans.cleanup();
        return tableProducer.getTableModel();
      }
      finally
      {
        currentlyRunningTransformation = null;


                                                  ResourceKey contextKey)
      throws ReportDataFactoryException, KettleException;

  public void cancelQuery()
  {
    final Trans currentlyRunningTransformation = this.currentlyRunningTransformation;
    if (currentlyRunningTransformation != null)
    {
      currentlyRunningTransformation.stopAll();
      this.currentlyRunningTransformation = null;
    }
  }

  private TableModel performQueryOnTransformation(final DataRow parameters,
                                                  final int queryLimit,
                                                  final DataFactoryContext context,
                                                  final TransMeta transMeta) throws EvaluationException, ParseException, KettleException, ReportDataFactoryException
  {
    final Trans trans = prepareTransformation(parameters, context, transMeta);

    StepInterface targetStep = findTargetStep(trans);

    final RowMetaInterface row = transMeta.getStepFields(getStepName());
    TableProducer tableProducer = new TableProducer(row, queryLimit, isStopOnError());
    targetStep.addRowListener(tableProducer);

    currentlyRunningTransformation = trans;
    try
    {
      trans.startThreads();
      trans.waitUntilFinished();
    }
    finally
    {
      trans.cleanup();
      currentlyRunningTransformation = null;
    }
    if (trans.getErrors() != 0 && isStopOnError()) {
      throw new ReportDataFactoryException(String.format
          ("Transformation reported %d records with errors and stop-on-error is true. Aborting.", trans.getErrors()));
    }

    return tableProducer.getTableModel();
  }
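
Both producer snippets register the TableProducer on a step through addRowListener, so it is evidently an implementation of Kettle's RowListener interface, which is notified of every row the step reads, writes, or flags as an error. A minimal sketch of such a listener, assuming the org.pentaho.di.trans.step.RowListener interface; the class name CollectingRowListener is made up for illustration:

import java.util.ArrayList;
import java.util.List;

import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.trans.step.RowListener;

// Hypothetical listener that buffers every row the monitored step writes.
public class CollectingRowListener implements RowListener {

  private final List<Object[]> rows = new ArrayList<Object[]>();

  public void rowWrittenEvent( RowMetaInterface rowMeta, Object[] row ) {
    rows.add( row );   // a row produced by the step
  }

  public void rowReadEvent( RowMetaInterface rowMeta, Object[] row ) {
    // rows the step consumes are not interesting here
  }

  public void errorRowWrittenEvent( RowMetaInterface rowMeta, Object[] row ) {
    // error rows could be counted here to honor a stop-on-error flag
  }

  public List<Object[]> getRows() {
    return rows;
  }
}

Registered on the target step before startThreads(), such a listener accumulates the step's output while the transformation runs, which is roughly what TableProducer does before exposing the rows as a TableModel.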

                                      final TransMeta transMeta) throws EvaluationException, ParseException, KettleException
  {
    final FormulaContext formulaContext = new WrappingFormulaContext(context.getFormulaContext(), parameters);
    final String[] params = fillArguments(formulaContext);

    final Trans trans = new Trans(transMeta);
    trans.setArguments(params);
    updateTransformationParameter(formulaContext, trans);
    transMeta.setInternalKettleVariables();
    trans.prepareExecution(params);
    return trans;
  }

    String systemSolutionfolder = PentahoSystem.getApplicationContext().getSolutionPath( "system" );
    String jobFileFullPath = systemSolutionfolder + "/" + getTransFileName();

    TransMeta transMeta = new TransMeta( jobFileFullPath );
    if ( transMeta != null ) {
      Trans trans = new Trans( transMeta );
      trans.execute( null );
      trans.waitUntilFinished();
    }
  }
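
The file-based snippet above simply calls execute(null). When the .ktr declares named parameters, the same pattern can set them on the Trans before execution, just as the first excerpt on this page does with setParameterValue. A minimal sketch, assuming a hypothetical named parameter INPUT_DIR declared in the transformation:

import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;

public class RunTransWithParameter {

  public static void run( final String ktrPath ) throws Exception {
    final TransMeta transMeta = new TransMeta( ktrPath );
    final Trans trans = new Trans( transMeta );

    // "INPUT_DIR" is a hypothetical named parameter declared in the .ktr file.
    trans.setParameterValue( "INPUT_DIR", "/tmp/input" );
    trans.activateParameters();   // push the parameter values into the variable space

    trans.execute( null );        // prepares the execution and starts the step threads
    trans.waitUntilFinished();
    trans.cleanup();
  }
}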

    return true;
  }

  private boolean executeTransformation( final TransMeta transMeta ) {
    boolean success = true;
    Trans trans = null;

    try {
      if ( transMeta != null ) {
        try {
          trans = new Trans( transMeta );
        } catch ( Exception e ) {
          throw new KettleComponentException( Messages.getInstance().getErrorString(
              "Kettle.ERROR_0010_BAD_TRANSFORMATION_METADATA" ), e ); //$NON-NLS-1$
        }
      }

      if ( trans == null ) {
        throw new KettleComponentException( Messages.getInstance().getErrorString(
            "Kettle.ERROR_0010_BAD_TRANSFORMATION_METADATA" ) ); //$NON-NLS-1$
      }

      // Remember where to get our execution logging from
      //
      logChannelId = trans.getLogChannelId();

      // OK, we have the transformation, now run it!
      if ( !customizeTrans( trans ) ) {
        throw new KettleComponentException( Messages.getInstance().getErrorString(
            "Kettle.ERROR_0028_CUSTOMIZATION_FUNCITON_FAILED" ) ); //$NON-NLS-1$
      }

      debug( Messages.getInstance().getString( "Kettle.DEBUG_PREPARING_TRANSFORMATION" ) ); //$NON-NLS-1$

      try {
        LogLevel lvl = getLogLevel();
        trans.setLogLevel( lvl );
        trans.prepareExecution( transMeta.getArguments() );
      } catch ( Exception e ) {
        throw new KettleComponentException( Messages.getInstance().getErrorString(
            "Kettle.ERROR_0011_TRANSFORMATION_PREPARATION_FAILED" ), e ); //$NON-NLS-1$
      }

      String stepName = null;
      String outputName = null;

      try {
        debug( Messages.getInstance().getString( "Kettle.DEBUG_FINDING_STEP_IMPORTER" ) ); //$NON-NLS-1$

        stepName = getMonitorStepName();
        outputName = getTransformSuccessOutputName();

        if ( outputName != null ) {
          registerAsStepListener( stepName, trans );
        }
      } catch ( Exception e ) {
        throw new KettleComponentException( Messages.getInstance().getErrorString(
            "Kettle.ERROR_0012_ROW_LISTENER_CREATE_FAILED" ), e ); //$NON-NLS-1$
      }

      try {
        debug( Messages.getInstance().getString( "Kettle.DEBUG_STARTING_TRANSFORMATION" ) ); //$NON-NLS-1$
        trans.startThreads();
      } catch ( Exception e ) {
        throw new KettleComponentException( Messages.getInstance().getErrorString(
            "Kettle.ERROR_0013_TRANSFORMATION_START_FAILED" ), e ); //$NON-NLS-1$
      }

      try {
        // It's running in a separate thread to allow monitoring,
        // etc.
        debug( Messages.getInstance().getString( "Kettle.DEBUG_TRANSFORMATION_RUNNING" ) ); //$NON-NLS-1$

        trans.waitUntilFinished();
        cleanLogChannel( trans );
        trans.cleanup();
      } catch ( Exception e ) {
        throw new KettleComponentException( Messages.getInstance().getErrorString(
            "Kettle.ERROR_0014_ERROR_DURING_EXECUTE" ), e ); //$NON-NLS-1$
      }