Package org.openiaml.model.inference

Examples of org.openiaml.model.inference.InfiniteSubProgressMonitor
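
InfiniteSubProgressMonitor is used here like a SubProgressMonitor: it wraps a parent IProgressMonitor and is handed a fixed number of the parent's ticks (45, 50 and 100 in the excerpts below); judging by its name and usage, it is meant for sub-tasks whose total amount of work is not known in advance. The sketch below is a minimal, hypothetical usage example (the InferenceExample class, the runOneStep() method and the tick counts are assumptions); only the constructor and the standard IProgressMonitor calls are taken from the excerpts that follow.

import org.eclipse.core.runtime.IProgressMonitor;
import org.openiaml.model.inference.InfiniteSubProgressMonitor;

public class InferenceExample {

  public void run(IProgressMonitor parent) {
    // the parent monitor owns the overall budget of 100 ticks
    parent.beginTask("Running inference", 100);

    // hand 50 of those ticks to a sub-monitor for open-ended work
    IProgressMonitor sub = new InfiniteSubProgressMonitor(parent, 50);
    sub.beginTask("Inferring elements", 1000);

    for (int i = 0; i < 1000; i++) {
      // honour cancellation between steps
      if (sub.isCanceled()) {
        return;
      }
      runOneStep();   // hypothetical unit of work
      sub.worked(1);
    }
    sub.done();

    parent.done();
  }

  private void runOneStep() {
    // placeholder for a single inference step
  }
}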


        // set up the sub progress monitor
        subProgressMonitor = new InfiniteSubProgressMonitor(monitor, 45);
       
        // to save memory, we only create the handler here, in this scope
        final ICreateElements handler = handlerFactory.createHandler(model);

        // allow for rules to override execution
        final OverridableCreateElementsHelper overridableHandler =
          new OverridableCreateElementsHelper(handler);
       
        // need to set up these variables before we insert the model,
        // otherwise the agenda cannot be built (as the rule heads use
        // {@link #getHelperFunctions()}).
        queue = new DroolsInsertionQueue(trackInsertions);
        workingMemory.setGlobal("handler", overridableHandler);
        workingMemory.setGlobal("queue", queue);
        workingMemory.setGlobal("functions", getHelperFunctions());

        // go!
        workingMemory.insert( model );
        if (monitor.isCanceled()) {
          return;
        }
        subProgressMonitor.done();
        subProgressMonitor = null;
       
        monitor.subTask("Inferring new model elements");
       
        /*
         * This simply adds the Rule source for inserted elements
         * (where possible).
         */
        if (logRuleSource) {
          workingMemory.addEventListener(new DefaultWorkingMemoryEventListener() {

            @Override
            public void objectInserted(ObjectInsertedEvent event) {
              if (event.getObject() instanceof GeneratedElement) {
                GeneratedElement e = (GeneratedElement) event.getObject();
                try {
                  handler.setGeneratedRule(e, event.getPropagationContext().getRuleOrigin().getName());
                } catch (InferenceException e1) {
                  throw new RuntimeException(e1.getMessage(), e1);
                }
              }
            }
          });
        }
       
        subProgressMonitor = new InfiniteSubProgressMonitor(monitor, 50);
        subProgressMonitor.beginTask("Inferring elements iteratively", INSERTION_ITERATION_LIMIT);
        for (int k = 0; k < INSERTION_ITERATION_LIMIT; k++) {
          // check for monitor cancel
          if (monitor.isCanceled()) {
            return;


   * @throws IOException
   * @throws CoreException
   */
  @Override
  public IStatus doExecute(IFile o, IProgressMonitor monitor2) throws InferenceException, FileNotFoundException, IOException, CoreException {
    IProgressMonitor monitor = new InfiniteSubProgressMonitor(monitor2, 100);
   
    monitor.beginTask("Removing phantom edges: '" + o.getName() + "'", 60);
   
    monitor.subTask("Loading model");
    try {
      loadedModel = ModelLoader.load(o);
    } catch (ModelLoadException e) {
      return errorStatus(e);
    }
    monitor.worked(10);
   
    // load the handler to remove elements
    EcoreInferenceHandler handler = new EcoreInferenceHandler(loadedModel.eResource());
   
    // actually remove the edges
    IStatus status = doRemovePhantomEdges(loadedModel, handler, new SubProgressMonitor(monitor, 50));
    if (!status.isOK()) {
      return status;
    }
   
    if (monitor.isCanceled())
      return Status.CANCEL_STATUS;
   
    // save it
    monitor.subTask("Saving");
    loadedModel.eResource().save(getSaveOptions());
   
    // finished
    monitor.done();
   
    return Status.OK_STATUS;
  }
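
Both excerpts follow the same Eclipse progress pattern: the parent monitor is wrapped in an InfiniteSubProgressMonitor with a fixed tick allotment (45 and then 50 in the first excerpt, 100 in the second), progress is reported against that budget, and monitor.isCanceled() is checked between phases so the operation can stop early (a plain return in the first excerpt, Status.CANCEL_STATUS in the second).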
