
Examples of com.esri.gpt.framework.context.RequestContext


  /**
   * Prints a collection of resource links as a JSON array.
   * @param links collection of resource links
   * @param more flag indicating whether more elements follow
   */
  protected void printLinks(ResourceLinks links, boolean more) {
    // no servlet request is available here, so extract(null) builds a standalone context
    RequestContext rc = RequestContext.extract(null);
    ResourceIdentifier ri = ResourceIdentifier.newIdentifier(rc);

    println("\"links\"" + sp() + ":" + sp() + "[");
    levelUp();

    // ... (remainder of method elided)
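
Every snippet on this page follows the same lifecycle: obtain a RequestContext, use it, release it. Several examples below (SearchConfig.getConfiguredInstance, the Lucene writer initialization) wrap the release in try/finally. A minimal sketch of the pattern, assuming no servlet request is available (hence the null argument to extract):

  RequestContext context = RequestContext.extract(null);
  try {
    // ... work with the context: configuration, catalog, identifiers ...
  } finally {
    // release resources (e.g. database connections) held by the context
    context.onExecutionPhaseCompleted();
  }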


  /**
   * Prints a collection of resources as a JSON array.
   * @param links collection of resource links
   * @param more flag indicating whether more elements follow
   */
  protected void printResources(ResourceLinks links, boolean more) {
    RequestContext rc = RequestContext.extract(null);
    ResourceIdentifier ri = ResourceIdentifier.newIdentifier(rc);

    println("\"resources\"" + sp() + ":" + sp() + "[");
    levelUp();

    // ... (remainder of method elided)

  /**
   * Gets the property meanings from the configured schemas.
   * @return the property meanings
   * @throws DiscoveryException if the property meanings cannot be obtained
   */
  private PropertyMeanings getMeanings() throws DiscoveryException {
    RequestContext context = this.getQueryAdapter().getIndexAdapter().getRequestContext();
    return context.getCatalogConfiguration().getConfiguredSchemas().getPropertyMeanings();
  }
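
Note that getMeanings() does not call onExecutionPhaseCompleted(): the context is borrowed from the index adapter, which owns its lifecycle. Only code that extracts a context itself should release it. A sketch of the distinction (variable names are illustrative):

  // Borrowed context: the owning component releases it, not the borrower.
  RequestContext borrowed = getQueryAdapter().getIndexAdapter().getRequestContext();
  PropertyMeanings meanings =
    borrowed.getCatalogConfiguration().getConfiguredSchemas().getPropertyMeanings();

  // Locally extracted context: the extracting code is responsible for cleanup.
  RequestContext local = RequestContext.extract(null);
  try {
    // ... use local ...
  } finally {
    local.onExecutionPhaseCompleted();
  }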

  /**
   * Invoked when processing of an execution unit ends.
   * @param unit execution unit
   * @param success true if processing completed successfully
   */
  @Override
  public void onEnd(final ExecutionUnit unit, boolean success) {
    final ExecutionUnitHelper helper = new ExecutionUnitHelper(unit);
    RequestContext context = RequestContext.extract(null);
    Date endTime = new Date();
    // get report builder
    ReportBuilder rp = helper.getReportBuilder();
   
    try {
      rp.setEndTime(endTime);
     
      // cleanup
      long deletedCount = performCleanup(context, unit, helper);
      rp.setDeletedCount(deletedCount);

      // prepare event record
      HeRecord event = new HeRecord(unit.getRepository());

      event.setHarvestedCount((int) rp.getHarvestedCount());
      event.setValidatedCount((int) rp.getValidatedCount());
      event.setPublishedCount((int) rp.getPublishedCount());
      event.setDeletedCount(deletedCount);

      // save report
      HeUpdateRequest updateReq = new HeUpdateRequest(context, event);
      updateReq.execute();

      HeUpdateReportRequest updateReportReq =
        new HeUpdateReportRequest(context, event);
      ReportStream reportStream = rp.createReportStream();

      InputStream in = null;
      try {
        in = reportStream.getStream();
        updateReportReq.execute(in, reportStream.getLength());
      } finally {
        if (in != null) {
          try {
            in.close();
          } catch (IOException ex) {
            // ignore: a failure to close the report stream is non-fatal here
          }
        }
      }

      sendNotification(context, event);

      // notify listeners
      listener.onHarvestEnd(unit.getRepository());

      LOGGER.info("[SYNCHRONIZER] Completed processing metadata records through: " + unit + ". Harvested: " + rp.getHarvestedCount() + ", validated: " + rp.getValidatedCount() + ", published: " + rp.getPublishedCount());
    } catch (Exception ex) {
      LOGGER.log(Level.SEVERE, "[SYNCHRONIZER] Error completing metadata records through: " + unit, ex);
    } finally {
      rp.cleanup();
      try {
        if (helper.getSourceUris() != null) {
          helper.getSourceUris().close();
        }
      } catch (IOException ex) {
        // ignore: a failure to close the source URI collection is non-fatal
      }
      context.onExecutionPhaseCompleted();
    }
  }
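
The explicit close in the finally block above predates try-with-resources. On Java 7 or later the stream handling could be condensed as sketched below, assuming ReportStream.getStream() returns an InputStream as the snippet suggests; note that an IOException thrown by close() would then propagate rather than being swallowed:

  ReportStream reportStream = rp.createReportStream();
  try (InputStream in = reportStream.getStream()) {
    updateReportReq.execute(in, reportStream.getLength());
  }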

    // (excerpt: body of a method that processes and publishes one harvested record)
    final ExecutionUnitHelper helper = new ExecutionUnitHelper(unit);
   
    // extract record information
    SourceUri sourceUri = record.getSourceUri();

    RequestContext context = RequestContext.extract(null);

    // get report builder
    ReportBuilder rp = helper.getReportBuilder();

    try {
      // immediately remove from the source URI's collection
      if (unit.getCleanupFlag()) {
        helper.getSourceUris().remove("uri", sourceUri.asString());
      }

      boolean proceed =
        (ProtocolInvoker.getUpdateDefinition(unit.getRepository().getProtocol()) && record instanceof Native)
        || (ProtocolInvoker.getUpdateContent(unit.getRepository().getProtocol()) && !(record instanceof Native));

      if (proceed) {
        // if 'from-date' criteria are present, publish a non-native record only
        // when its last update date is known and not before the 'from-date'
        if (!(record instanceof Native) && unit.getCriteria().getFromDate() != null) {
          Date lastUpdateDate = record.getUpdateDate();
          if (lastUpdateDate != null && lastUpdateDate.before(unit.getCriteria().getFromDate())) {
            // stop harvesting it
            proceed = false;
          }
        }
      }

      if (proceed) {
        String metadata = "";
        try {
          // notify listeners
          metadata = record.getContent();
          listener.onHarvestMetadata(unit.getRepository(), sourceUri, metadata);

          // publication request
          HarvesterRequest publicationRequest =
            new HarvesterRequest(context, unit.getPublisher(), unit.getRepository().getUuid(), sourceUri.asString(), metadata);
          publicationRequest.getPublicationRecord().setAutoApprove(ProtocolInvoker.getAutoApprove(unit.getRepository().getProtocol()));

          // if this is a repository descriptor, update repository record
          if (record instanceof Native && isRepositorySourceUri(sourceUri, unit.getRepository())) {
            String sMethod = MmdEnums.PublicationMethod.registration.toString();
            publicationRequest.getPublicationRecord().setUuid(unit.getRepository().getUuid());
            publicationRequest.getPublicationRecord().setPublicationMethod(sMethod);
            publicationRequest.getPublicationRecord().setAlternativeTitle(unit.getRepository().getName());
            publicationRequest.getPublicationRecord().setLockTitle(ProtocolInvoker.getLockTitle(unit.getRepository().getProtocol()));
          }

          publicationRequest.publish();
          boolean bReplaced =
            publicationRequest.getPublicationRecord().getWasDocumentReplaced();

          LOGGER.finer("[SYNCHRONIZER] SUCCEEDED processing metadata #" + (rp.getHarvestedCount() + 1) + " through: " + unit + ", source URI: " + sourceUri);

          // notify listeners
          listener.onPublishMetadata(unit.getRepository(), sourceUri, publicationRequest.getPublicationRecord().getUuid(), metadata);

          // create harvest report entry for the current record
          rp.createEntry(sourceUri.asString(), !bReplaced);
        } catch (NullReferenceException ex) {
          if (LOGGER.isLoggable(Level.FINEST)) {
            LOGGER.log(Level.FINEST, "[SYNCHRONIZER] FAILED processing metadata #" + (rp.getHarvestedCount() + 1) + " through: " + unit + ", source URI: " + sourceUri, ex);
          } else {
            LOGGER.finer("[SYNCHRONIZER] FAILED processing metadata #" + (rp.getHarvestedCount() + 1) + " through: " + unit + ", source URI: " + sourceUri + ", details: " + ex.getMessage());
          }
          rp.createInvalidEntry(sourceUri.asString(), Arrays.asList(new String[]{ex.getMessage()}));
          listener.onPublishException(unit.getRepository(), sourceUri, metadata, ex);
        } catch (ValidationException ex) {
          if (LOGGER.isLoggable(Level.FINEST)) {
            LOGGER.log(Level.FINEST, "[SYNCHRONIZER] FAILED processing metadata #" + (rp.getHarvestedCount() + 1) + " through: " + unit + ", source URI: " + sourceUri, ex);
          } else {
            LOGGER.finer("[SYNCHRONIZER] FAILED processing metadata #" + (rp.getHarvestedCount() + 1) + " through: " + unit + ", source URI: " + sourceUri + ", details: " + ex.getMessage());
          }
          ArrayList<String> messages = new ArrayList<String>();
          ex.getValidationErrors().buildMessages(messageBroker, messages, true);
          rp.createInvalidEntry(sourceUri.asString(), messages);
          listener.onPublishException(unit.getRepository(), sourceUri, metadata, ex);
        } catch (IllegalArgumentException ex) {
          if (LOGGER.isLoggable(Level.FINEST)) {
            LOGGER.log(Level.FINEST, "[SYNCHRONIZER] FAILED processing metadata #" + (rp.getHarvestedCount() + 1) + " through: " + unit + ", source URI: " + sourceUri, ex);
          } else {
            LOGGER.finer("[SYNCHRONIZER] FAILED processing metadata #" + (rp.getHarvestedCount() + 1) + " through: " + unit + ", source URI: " + sourceUri + ", details: " + ex.getMessage());
          }
          rp.createUnpublishedEntry(sourceUri.asString(), Arrays.asList(new String[]{ex.getMessage()}));
          listener.onPublishException(unit.getRepository(), sourceUri, metadata, ex);
        } catch (SchemaException ex) {
          if (LOGGER.isLoggable(Level.FINEST)) {
            LOGGER.log(Level.FINEST, "[SYNCHRONIZER] FAILED processing metadata #" + (rp.getHarvestedCount() + 1) + " through: " + unit + ", source URI: " + sourceUri, ex);
          } else {
            LOGGER.finer("[SYNCHRONIZER] FAILED processing metadata #" + (rp.getHarvestedCount() + 1) + " through: " + unit + ", source URI: " + sourceUri + ", details: " + ex.getMessage());
          }
          rp.createInvalidEntry(sourceUri.asString(), Arrays.asList(new String[]{ex.getMessage()}));
          listener.onPublishException(unit.getRepository(), sourceUri, metadata, ex);
        } catch (ImsServiceException ex) {
          if (LOGGER.isLoggable(Level.FINEST)) {
            LOGGER.log(Level.FINEST, "[SYNCHRONIZER] FAILED processing metadata #" + (rp.getHarvestedCount() + 1) + " through: " + unit + ", source URI: " + sourceUri, ex);
          } else {
            LOGGER.finer("[SYNCHRONIZER] FAILED processing metadata #" + (rp.getHarvestedCount() + 1) + " through: " + unit + ", source URI: " + sourceUri + ", details: " + ex.getMessage());
          }
          rp.createUnpublishedEntry(sourceUri.asString(), Arrays.asList(new String[]{ex.getMessage()}));
          listener.onPublishException(unit.getRepository(), sourceUri, metadata, ex);
        } catch (SAXException ex) {
          if (LOGGER.isLoggable(Level.FINEST)) {
            LOGGER.log(Level.FINEST, "[SYNCHRONIZER] FAILED processing metadata #" + (rp.getHarvestedCount() + 1) + " through: " + unit + ", source URI: " + sourceUri, ex);
          } else {
            LOGGER.finer("[SYNCHRONIZER] FAILED processing metadata #" + (rp.getHarvestedCount() + 1) + " through: " + unit + ", source URI: " + sourceUri + ", details: " + ex.getMessage());
          }
          rp.createInvalidEntry(sourceUri.asString(), Arrays.asList(new String[]{ex.getMessage()}));
          listener.onPublishException(unit.getRepository(), sourceUri, metadata, ex);
        } catch (TransformerConfigurationException ex) {
          // rethrown: a transformer configuration problem is not specific to one record
          throw ex;
        } catch (TransformerException ex) {
          if (LOGGER.isLoggable(Level.FINEST)) {
            LOGGER.log(Level.FINEST, "[SYNCHRONIZER] FAILED processing metadata #" + (rp.getHarvestedCount() + 1) + " through: " + unit + ", source URI: " + sourceUri, ex);
          } else {
            LOGGER.finer("[SYNCHRONIZER] FAILED processing metadata #" + (rp.getHarvestedCount() + 1) + " through: " + unit + ", source URI: " + sourceUri + ", details: " + ex.getMessage());
          }
          rp.createInvalidEntry(sourceUri.asString(), Arrays.asList(new String[]{ex.getMessage()}));
          listener.onPublishException(unit.getRepository(), sourceUri, metadata, ex);
        } catch (HttpClientException ex) {
          if (LOGGER.isLoggable(Level.FINEST)) {
            LOGGER.log(Level.FINEST, "[SYNCHRONIZER] FAILED processing metadata #" + (rp.getHarvestedCount() + 1) + " through: " + unit + ", source URI: " + sourceUri, ex);
          } else {
            LOGGER.finer("[SYNCHRONIZER] FAILED processing metadata #" + (rp.getHarvestedCount() + 1) + " through: " + unit + ", source URI: " + sourceUri + ", details: " + ex.getMessage());
          }
          rp.createInvalidEntry(sourceUri.asString(), Arrays.asList(new String[]{ex.getMessage()}));
          listener.onPublishException(unit.getRepository(), sourceUri, metadata, ex);
        }
      }
    } finally {
      context.onExecutionPhaseCompleted();
    }
  }
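
The eight catch blocks above differ only in which report entry they create. On Java 7 or later the duplication could be collapsed with multi-catch, as sketched below. This is a refactoring idea, not the project's code: logFailure is a hypothetical helper wrapping the FINEST/FINER logging shown above, and the TransformerConfigurationException rethrow must stay ahead of the TransformerException handler because the former is a subclass of the latter:

  } catch (TransformerConfigurationException ex) {
    throw ex; // configuration problems are not specific to one record
  } catch (NullReferenceException | SchemaException | SAXException
         | TransformerException | HttpClientException ex) {
    logFailure(unit, sourceUri, rp, ex); // hypothetical logging helper
    rp.createInvalidEntry(sourceUri.asString(), Arrays.asList(ex.getMessage()));
    listener.onPublishException(unit.getRepository(), sourceUri, metadata, ex);
  } catch (IllegalArgumentException | ImsServiceException ex) {
    logFailure(unit, sourceUri, rp, ex); // hypothetical logging helper
    rp.createUnpublishedEntry(sourceUri.asString(), Arrays.asList(ex.getMessage()));
    listener.onPublishException(unit.getRepository(), sourceUri, metadata, ex);
  }

ValidationException would keep its own block, since it expands validation errors into messages before reporting.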

/**
 * Gets the configured search configuration instance.
 * @return the search configuration
 * @throws ConfigurationException if SearchConfig is null in
 * ApplicationContext
 */
public static SearchConfig getConfiguredInstance() {
 
  RequestContext requestContext = RequestContext.extract(null);
  try {
    SearchConfig config =
      requestContext
      .getApplicationConfiguration()
      .getCatalogConfiguration()
      .getSearchConfig();

    if (config == null) {
      throw new ConfigurationException("Got null Search Configuration from " +
      "ApplicationContext.CatalogConfiguration.searchConfig");
    }
    return config;
  } finally {
    if (requestContext != null) {
      requestContext.onExecutionPhaseCompleted();
    }
  }

}
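
Because getConfiguredInstance() extracts and releases a RequestContext internally, callers do not manage one themselves. A hypothetical usage:

  SearchConfig config = SearchConfig.getConfiguredInstance();
  // ... read search settings from config ...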

    // (excerpt: initializing the local Lucene IndexWriter at application startup)
    StringAttributeMap params = context.getConfiguration().getCatalogConfiguration().getParameters();
    String param = Val.chkStr(params.getValue("lucene.useLocalWriter"));
    boolean bUseLocalWriter = !param.equalsIgnoreCase("false");
   
    if (bUseLocalWriter) {
      RequestContext reqCtx = null;
      try {
        reqCtx = RequestContext.extract(null);
        LuceneIndexAdapter adapter = new LuceneIndexAdapter(reqCtx);
        adapter.touch();
      } catch (Exception e) {
        LOGGER.log(Level.SEVERE,"Error while touching IndexWriter on init.",e);
      } finally {
        if (reqCtx != null) reqCtx.onExecutionPhaseCompleted();
      }
    }
  }
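
Note the default-true semantics of the parameter read above: any value other than the literal string "false" (including a misspelling) enables the local writer. If stricter parsing is wanted, a small helper could be used instead; the sketch below is hypothetical and relies only on Val.chkStr from the snippet:

  // strict boolean parse; unrecognized values fall back to the supplied default
  static boolean parseBool(String value, boolean dflt) {
    String v = Val.chkStr(value);
    if (v.equalsIgnoreCase("true"))  return true;
    if (v.equalsIgnoreCase("false")) return false;
    return dflt;
  }

  boolean bUseLocalWriter = parseBool(params.getValue("lucene.useLocalWriter"), true);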

  /**
   * Runs the DCAT cache update.
   * @throws IOException if an I/O error occurs
   */
  private void process() throws IOException {
    LOGGER.info("Starting DCAT cache update process...");
    RestQuery query = new RestQuery();
    query.setResponseFormat("dcat");
    RequestContext context = RequestContext.extract(null);
    MessageBroker msgBroker = new MessageBroker();
    msgBroker.setBundleBaseName(MessageBroker.DEFAULT_BUNDLE_BASE_NAME);
   
    String baseContextPath = Val.chkStr(RequestContext.resolveBaseContextPath(null));
    if (baseContextPath.isEmpty()) {
      // ... (remainder of method elided)

    }
    addSpaces(sb,3);
    sb.append("],");
    addSpaces(sb,3);

    RequestContext context = RequestContext.extract(null);
    Task[] tasksInQueue = this.taskQueue.all(context);
    statWriter.writeElement("numberOfTasksInQueue",String.valueOf(tasksInQueue.length),true,true);
    addSpaces(sb,3);
    sb.append("\"tasksInQueue\": [");
    addSpaces(sb,4);
    // ... (remainder of method elided)

    this.stopOnError = stopOnError;
  }

  @Override
  public void execute() {
    RequestContext context = RequestContext.extract(null);

    boolean success = false;
    long count = 0;
    Result result = null;
    final ExecutionUnit unit = getExecutionUnit();
    LOGGER.log(Level.FINEST, "[SYNCHRONIZER] Starting pushing through unit: {0}", unit);
    if (isActive()) {
      getProcessor().onStart(getExecutionUnit());
    }

    ExecutionUnitHelper helper = new ExecutionUnitHelper(getExecutionUnit());
    // get report builder
    final ReportBuilder rp = helper.getReportBuilder();

    try {
      Protocol protocol = getExecutionUnit().getRepository().getProtocol();
      if (protocol instanceof HarvestProtocolAgp2Agp) {
        HarvestProtocolAgp2Agp agp2agp = (HarvestProtocolAgp2Agp)protocol;
        AgpSource source = agp2agp.getSource();
        AgpDestination destination = agp2agp.getDestination();
        AgpPush agpPush = new AgpPush(source, destination) {
          private long counter;
         
          @Override
          protected boolean syncItem(AgpItem sourceItem) throws Exception {
            counter++;
            String sourceUri = sourceItem.getProperties().getValue("id");
            try {
              boolean result = super.syncItem(sourceItem);
              if (result) {
                rp.createEntry(sourceUri, result);
                LOGGER.log(Level.FINEST, "[SYNCHRONIZER] Pushed item #{0} of source URI: \"{1}\" through unit: {2}", new Object[]{counter, sourceItem.getProperties().getValue("id"), unit});
              } else {
                rp.createUnpublishedEntry(sourceUri, Arrays.asList(new String[]{"Ignored"}));
                LOGGER.log(Level.FINEST, "[SYNCHRONIZER] Rejected item #{0} of source URI: \"{1}\" through unit: {2}", new Object[]{counter, sourceItem.getProperties().getValue("id"), unit});
              }
              return result;
            } catch (AgpException ex) {
              if (stopOnError) {
                throw ex;
              }
              LOGGER.log(Level.WARNING, "[SYNCHRONIZER] Failed pushing item #{0} of source URI: \"{1}\" through unit: {2}. Reason: {3}", new Object[]{counter, sourceItem.getProperties().getValue("id"), unit, ex.getMessage()});
              rp.createUnpublishedEntry(sourceUri, Arrays.asList(new String[]{ex.getMessage()}));
              return false;
            } catch (HttpClientException ex) {
              if (stopOnError) {
                throw ex;
              }
              LOGGER.log(Level.WARNING, "[SYNCHRONIZER] Failed pushing item #{0} of source URI: \"{1}\" through unit: {2}. Reason: {3}", new Object[]{counter, sourceItem.getProperties().getValue("id"), unit, ex.getMessage()});
              rp.createUnpublishedEntry(sourceUri, Arrays.asList(new String[]{ex.getMessage()}));
              return false;
            } catch (Exception ex) {
              // any other exception propagates and aborts the push
              throw ex;
            }
          }

          @Override
          protected boolean doContinue() {
            boolean doContinue = Agp2AgpExecutor.this.isActive();
            if (!doContinue) {
              unit.setCleanupFlag(false);
            }
            return doContinue;
          }
         
        };
        agpPush.synchronize();
      }

      success = true;

      if (isActive()) {
        // save last sync date
        getExecutionUnit().getRepository().setLastSyncDate(rp.getStartTime());
        HrUpdateLastSyncDate updLastSyncDate = new HrUpdateLastSyncDate(context, unit.getRepository());
        updLastSyncDate.execute();
      }
    } catch (Exception ex) {
      rp.setException(ex);
      unit.setCleanupFlag(false);
      LOGGER.log(Level.FINEST, "[SYNCHRONIZER] Failed pushing through unit: {0}. Cause: {1}", new Object[]{unit, ex.getMessage()});
      getProcessor().onIterationException(getExecutionUnit(), ex);
    } finally {
      if (!isShutdown()) {
        getProcessor().onEnd(unit, success);
        context.onExecutionPhaseCompleted();
      }
      if (result != null) {
        result.destroy();
      }
      LOGGER.log(Level.FINEST, "[SYNCHRONIZER] Completed pushing through unit: {0}. Obtained {1} records.", new Object[]{unit, count});
      // ... (remainder of method elided)
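
AgpPush is used above as a template: syncItem() decides per-item success and doContinue() is the cancellation hook polled between items. A condensed sketch of the hook contract, with comments that are an interpretation of the snippet rather than the library's documentation:

  AgpPush push = new AgpPush(source, destination) {
    @Override
    protected boolean syncItem(AgpItem sourceItem) throws Exception {
      // return true to count the item as pushed, false to skip it;
      // throwing aborts the run (subject to the stop-on-error policy)
      return super.syncItem(sourceItem);
    }

    @Override
    protected boolean doContinue() {
      // polled between items; returning false stops the push loop
      return true;
    }
  };
  push.synchronize();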
