Package: com.esri.gpt.framework.resource.query

Examples of com.esri.gpt.framework.resource.query.Result


/**
 * Executes the harvesting run for the current execution unit.
 * <p>
 * Runs the query, iterates over the harvested records, feeds each record to
 * the data processor, honors cancel/suspend requests, and on success stores
 * the repository's last-synchronization date.  Cleanup (the onEnd callback
 * and result destruction) happens in the finally block.
 */
public void execute() {
  // null argument: builds a request context outside of any servlet request
  RequestContext context = RequestContext.extract(null);
 
  boolean success = false;   // set only after the iteration completes without exception
  long count = 0;            // number of records iterated; reported in the final log
  Result result = null;      // declared outside try so finally can destroy it
  LOGGER.log(Level.FINEST, "[SYNCHRONIZER] Starting harvesting through unit: {0}", unit);
  if (isActive()) {
    dataProcessor.onStart(unit);
  }

  ExecutionUnitHelper helper = new ExecutionUnitHelper(unit);
  // get report builder
  ReportBuilder rp = helper.getReportBuilder();

  try {
    result = executeQuery();
    // flatten the (possibly nested) resource tree and expose it as publishables
    Iterable<Publishable> records = new PublishablesAdapter(new FlatResourcesAdapter(result.getResources()));
    for (Publishable r : records) {
      if (!isActive()){
        // run was cancelled: suppress cleanup of partially harvested data
        unit.setCleanupFlag(false);
        break;
      }
      count++;
      LOGGER.log(Level.FINEST, "[SYNCHRONIZER] Harvested metadata #{0} of source URI: \"{1}\" through unit: {2}", new Object[]{rp.getHarvestedCount()+1, r.getSourceUri(), unit});
      if (isSuspendedWithAck()) {
        // park this thread until resumed (notify on this) or deactivated
        while (isSuspended()) {
          try {
            synchronized (this) {
              wait();
            }
          } catch (InterruptedException ex) {
            // NOTE(review): interrupt is swallowed here; restoring the
            // interrupt status would make wait() throw immediately on each
            // further loop pass while suspended — confirm the intended
            // interrupt semantics before changing this.
          }
          if (!isActive()) {
            // deactivated while suspended: stop waiting; the outer loop's
            // isActive() checks then skip further processing
            break;
          }
        }
      }
      if (isActive()) {
        dataProcessor.onMetadata(unit, r);
      }
    }
   
    success = true;
   
    if (isActive()) {
      // save last sync date
      unit.getRepository().setLastSyncDate(rp.getStartTime());
      HrUpdateLastSyncDate updLastSyncDate = new HrUpdateLastSyncDate(context, unit.getRepository());
      updLastSyncDate.execute();
    }
  } catch (Exception ex) {
    // record the failure in the report and keep already harvested data
    rp.setException(ex);
    unit.setCleanupFlag(false);
    LOGGER.log(Level.FINEST, "[SYNCHRONIZER] Failed harvesting through unit: {0}. Cause: {1}", new Object[]{unit, ex.getMessage()});
    dataProcessor.onIterationException(unit, ex);
  } finally {
    try {
      if (!isShutdown()) {
        dataProcessor.onEnd(unit, success);
        context.onExecutionPhaseCompleted();
      }
    } finally {
      // destroy the result even if onEnd/onExecutionPhaseCompleted throws
      if (result!=null) {
        result.destroy();
      }
    }
    LOGGER.log(Level.FINEST, "[SYNCHRONIZER] Completed harvesting through unit: {0}. Obtained {1} records.", new Object[]{unit, count});
  }
}
View Full Code Here


}

  @Override
public Result execute() {
  LOGGER.finer("Executing query: " + this);
  Result r = new CommonResult(new JoinResourcesAdapter(new NativeIterable(), new CswFolders(context, proxy, criteria))) {
    @Override
    public void destroy() {
      proxy.destroy();
    }
  };
View Full Code Here

  this.criteria = criteria;
}

public Result execute()  {
  LOGGER.log(Level.FINER, "Executing query: {0}", this);
  Result r = new CommonResult(new AgpFolders(context, info, criteria));
  LOGGER.log(Level.FINER, "Completed query execution: {0}", this);
  return r;
}
View Full Code Here

          LOGGER.log(Level.SEVERE, "Error iterating through AGS resources.", ex);
        }
      }, source);
     
      Query newQuery = qb.newQuery(new CommonCriteria());
      Result result = newQuery.execute();

      this.destination.getConnection().generateToken();
     
      Iterable<IServiceInfoProvider> records = new ServiceInfoProviderAdapter(new FlatResourcesAdapter(result.getResources()));
      for (IServiceInfoProvider r: records) {
        if (!doContinue()) {
          break;
        }
        ServiceInfo serviceInfo = r.getServiceInfo();
View Full Code Here

  public void execute() {
    RequestContext context = RequestContext.extract(null);

    boolean success = false;
    long count = 0;
    Result result = null;
    final ExecutionUnit unit = getExecutionUnit();
    LOGGER.log(Level.FINEST, "[SYNCHRONIZER] Starting pushing through unit: {0}", unit);
    if (isActive()) {
      getProcessor().onStart(getExecutionUnit());
    }

    ExecutionUnitHelper helper = new ExecutionUnitHelper(getExecutionUnit());
    // get report builder
    final ReportBuilder rp = helper.getReportBuilder();

    try {
      Protocol protocol = getExecutionUnit().getRepository().getProtocol();
      if (protocol instanceof HarvestProtocolAgs2Agp) {
        HarvestProtocolAgs2Agp ags2agp = (HarvestProtocolAgs2Agp)protocol;
        ArcGISInfo source = ags2agp.getSource();
        AgpDestination destination = ags2agp.getDestination();
       
        Ags2AgpCopy copy = new Ags2AgpCopy(source, destination){
          private long counter;
         
          @Override
          protected boolean syncItem(AgpItem sourceItem) throws Exception {
            counter++;
            String sourceUri = sourceItem.getProperties().getValue("id");
            try {
              boolean result = super.syncItem(sourceItem);
              rp.createEntry(sourceUri, result);
              LOGGER.log(Level.FINEST, "[SYNCHRONIZER] Pushed item #{0} of source URI: \"{1}\" through unit: {2}", new Object[]{counter, sourceItem.getProperties().getValue("id"), unit});
              return result;
            } catch (AgpException ex) {
              LOGGER.log(Level.WARNING, "[SYNCHRONIZER] Failed pushing item #{0} of source URI: \"{1}\" through unit: {2}. Reason: {3}", new Object[]{counter, sourceItem.getProperties().getValue("id"), unit, ex.getMessage()});
              rp.createUnpublishedEntry(sourceUri, Arrays.asList(new String[]{ex.getMessage()}));
              return false;
            } catch (HttpClientException ex) {
              LOGGER.log(Level.WARNING, "[SYNCHRONIZER] Failed pushing item #{0} of source URI: \"{1}\" through unit: {2}. Reason: {3}", new Object[]{counter, sourceItem.getProperties().getValue("id"), unit, ex.getMessage()});
              rp.createUnpublishedEntry(sourceUri, Arrays.asList(new String[]{ex.getMessage()}));
              return false;
            } catch (Exception ex) {
              throw ex;
            }
          }

          @Override
          protected boolean doContinue() {
            boolean doContinue = Ags2AgpExecutor.this.isActive();
            if (!doContinue) {
              unit.setCleanupFlag(false);
            }
            return doContinue;
          }
        };
       
        copy.copy();
      }

      success = true;

      if (isActive()) {
        // save last sync date
        getExecutionUnit().getRepository().setLastSyncDate(rp.getStartTime());
        HrUpdateLastSyncDate updLastSyncDate = new HrUpdateLastSyncDate(context, unit.getRepository());
        updLastSyncDate.execute();
      }
    } catch (Exception ex) {
      rp.setException(ex);
      unit.setCleanupFlag(false);
      LOGGER.log(Level.FINEST, "[SYNCHRONIZER] Failed pushing through unit: {0}. Cause: {1}", new Object[]{unit, ex.getMessage()});
      getProcessor().onIterationException(getExecutionUnit(), ex);
    } finally {
      if (!isShutdown()) {
        getProcessor().onEnd(unit, success);
        context.onExecutionPhaseCompleted();
      }
      if (result != null) {
        result.destroy();
      }
      LOGGER.log(Level.FINEST, "[SYNCHRONIZER] Completed pushing through unit: {0}. Obtained {1} records.", new Object[]{unit, count});
    }
  }
View Full Code Here

  public void execute() {
    RequestContext context = RequestContext.extract(null);

    boolean success = false;
    long count = 0;
    Result result = null;
    final ExecutionUnit unit = getExecutionUnit();
    LOGGER.log(Level.FINEST, "[SYNCHRONIZER] Starting pushing through unit: {0}", unit);
    if (isActive()) {
      getProcessor().onStart(getExecutionUnit());
    }

    ExecutionUnitHelper helper = new ExecutionUnitHelper(getExecutionUnit());
    // get report builder
    final ReportBuilder rp = helper.getReportBuilder();

    try {
      Protocol protocol = getExecutionUnit().getRepository().getProtocol();
      if (protocol instanceof HarvestProtocolAgp2Agp) {
        HarvestProtocolAgp2Agp agp2agp = (HarvestProtocolAgp2Agp)protocol;
        AgpSource source = agp2agp.getSource();
        AgpDestination destination = agp2agp.getDestination();
        AgpPush agpPush = new AgpPush(source, destination) {
          private long counter;
         
          @Override
          protected boolean syncItem(AgpItem sourceItem) throws Exception {
            counter++;
            String sourceUri = sourceItem.getProperties().getValue("id");
            try {
              boolean result = super.syncItem(sourceItem);
              if (result) {
                rp.createEntry(sourceUri, result);
                LOGGER.log(Level.FINEST, "[SYNCHRONIZER] Pushed item #{0} of source URI: \"{1}\" through unit: {2}", new Object[]{counter, sourceItem.getProperties().getValue("id"), unit});
              } else {
                rp.createUnpublishedEntry(sourceUri, Arrays.asList(new String[]{"Ignored"}));
                LOGGER.log(Level.FINEST, "[SYNCHRONIZER] Rejected item #{0} of source URI: \"{1}\" through unit: {2}", new Object[]{counter, sourceItem.getProperties().getValue("id"), unit});
              }
              return result;
            } catch (AgpException ex) {
              if (stopOnError) {
                throw ex;
              }
              LOGGER.log(Level.WARNING, "[SYNCHRONIZER] Failed pushing item #{0} of source URI: \"{1}\" through unit: {2}. Reason: {3}", new Object[]{counter, sourceItem.getProperties().getValue("id"), unit, ex.getMessage()});
              rp.createUnpublishedEntry(sourceUri, Arrays.asList(new String[]{ex.getMessage()}));
              return false;
            } catch (HttpClientException ex) {
              if (stopOnError) {
                throw ex;
              }
              LOGGER.log(Level.WARNING, "[SYNCHRONIZER] Failed pushing item #{0} of source URI: \"{1}\" through unit: {2}. Reason: {3}", new Object[]{counter, sourceItem.getProperties().getValue("id"), unit, ex.getMessage()});
              rp.createUnpublishedEntry(sourceUri, Arrays.asList(new String[]{ex.getMessage()}));
              return false;
            } catch (Exception ex) {
              throw ex;
            }
          }

          @Override
          protected boolean doContinue() {
            boolean doContinue = Agp2AgpExecutor.this.isActive();
            if (!doContinue) {
              unit.setCleanupFlag(false);
            }
            return doContinue;
          }
         
        };
        agpPush.synchronize();
      }

      success = true;

      if (isActive()) {
        // save last sync date
        getExecutionUnit().getRepository().setLastSyncDate(rp.getStartTime());
        HrUpdateLastSyncDate updLastSyncDate = new HrUpdateLastSyncDate(context, unit.getRepository());
        updLastSyncDate.execute();
      }
    } catch (Exception ex) {
      rp.setException(ex);
      unit.setCleanupFlag(false);
      LOGGER.log(Level.FINEST, "[SYNCHRONIZER] Failed pushing through unit: {0}. Cause: {1}", new Object[]{unit, ex.getMessage()});
      getProcessor().onIterationException(getExecutionUnit(), ex);
    } finally {
      if (!isShutdown()) {
        getProcessor().onEnd(unit, success);
        context.onExecutionPhaseCompleted();
      }
      if (result != null) {
        result.destroy();
      }
      LOGGER.log(Level.FINEST, "[SYNCHRONIZER] Completed pushing through unit: {0}. Obtained {1} records.", new Object[]{unit, count});
    }
  }
View Full Code Here

      System.out.println("newQuery");
      CommonCriteria crt = new CommonCriteria();
      crt.setMaxRecords(5);
      Query query = instance.newQuery(crt);
      assertNotNull(query);
      Result result = query.execute();
      assertNotNull(result);
      Iterable<Resource> resources = result.getResources();
      assertNotNull(resources);
      int count = 0;
      for (Resource resource : resources) {
        count++;
      }
View Full Code Here

TOP

Related Classes of com.esri.gpt.framework.resource.query.Result

Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.