Package com.esri.gpt.framework.context

Examples of com.esri.gpt.framework.context.RequestContext
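
All of the examples share one lifecycle: a RequestContext is extracted at the start of a unit of work (from the active HttpServletRequest in the web tier, or with extract(null) from a background thread), passed around while the work runs, and released in a finally block via onExecutionPhaseCompleted(). A minimal sketch of that pattern, where doWork is a placeholder for the application logic:

import javax.servlet.http.HttpServletRequest;
import com.esri.gpt.framework.context.RequestContext;

public class RequestContextLifecycle {

  /** Web tier: bind the context to the active servlet request. */
  public void onWebRequest(HttpServletRequest request) {
    RequestContext context = RequestContext.extract(request);
    try {
      doWork(context);
    } finally {
      // releases per-request resources (e.g. database connections)
      context.onExecutionPhaseCompleted();
    }
  }

  /** Background thread (e.g. a harvester): no servlet request to bind to. */
  public void onBackgroundTask() {
    RequestContext context = RequestContext.extract(null);
    try {
      doWork(context);
    } finally {
      context.onExecutionPhaseCompleted();
    }
  }

  /** Placeholder for whatever the unit of work actually does. */
  private void doWork(RequestContext context) {}
}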


  /**
   * Gets the underlying servlet request.
   * @return the servlet request
   */
  private HttpServletRequest getServletRequest() {
    RequestContext context = this.extractRequestContext();
    return (HttpServletRequest) context.getServletRequest();
  }


public Capabilities getCapabilities() {
  return capabilities;
}

public Query newQuery(Criteria crt) {
  RequestContext requestContext = RequestContext.extract(null);
  try {
    // note: "context" below is a field of the enclosing builder (its iteration
    // context), distinct from the RequestContext extracted above
    ResourceProcessor processor = new ProcessorFactory().interrogate(
        newProcessingContext(requestContext), info.getUrl(), info.newCredentials());
    return processor.createQuery(context, crt);
  } catch (IOException ex) {
    context.onIterationException(ex);
    return null;
  } finally {
    requestContext.onExecutionPhaseCompleted();
  }
}


public Native getNativeResource() {
  RequestContext requestContext = RequestContext.extract(null);
  try {
    ResourceProcessor processor = new ProcessorFactory().interrogate(
        newProcessingContext(requestContext), info.getUrl(), info.newCredentials());
    return processor != null ? processor.getNativeResource(context) : null;
  } catch (IOException ex) {
    context.onIterationException(ex);
    return null;
  } finally {
    requestContext.onExecutionPhaseCompleted();
  }
}

/**
 * Selects all records with the given UUIDs.
 * @param uuids uuids of the records
 * @return collection of records
 * @throws SQLException if accessing database fails
 */
private HjRecords selectAll(String[] uuids) throws SQLException {
  RequestContext context = RequestContext.extract(null);
  try {
    HjLoadAllRequest loadAllRequest = new HjLoadAllRequest(context, uuids);
    loadAllRequest.execute();
    return loadAllRequest.getQueryResult().getRecords();
  } finally {
    context.onExecutionPhaseCompleted();
  }
}

/**
 * Withdraws all records.
 * @param uuids uuids of the records to withdraw
 * @throws SQLException if accessing database fails
 */
private void withdrawAll(String[] uuids) throws SQLException {
  RequestContext context = RequestContext.extract(null);
  try {
    HjWithdrawRequest withdrawRequest = new HjWithdrawRequest(context, uuids);
    withdrawRequest.execute();
  } finally {
    context.onExecutionPhaseCompleted();
  }
}
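
selectAll and withdrawAll above follow the framework's request-object idiom: construct the request with the RequestContext, call execute(), then read any result off the request afterwards. A sketch of that idiom under the same lifecycle; TouchRequest here is a hypothetical stand-in for the real Hj* request classes:

import java.sql.SQLException;
import com.esri.gpt.framework.context.RequestContext;

public class RecordMaintenance {

  /** Hypothetical request class; the real Hj* requests likewise take the
      context in their constructor and expose results after execute(). */
  static class TouchRequest {
    private final RequestContext context;
    private final String[] uuids;
    TouchRequest(RequestContext context, String[] uuids) {
      this.context = context;
      this.uuids = uuids;
    }
    void execute() throws SQLException {
      // a real implementation would obtain its JDBC connection via the context
    }
  }

  private void touchAll(String[] uuids) throws SQLException {
    RequestContext context = RequestContext.extract(null);
    try {
      new TouchRequest(context, uuids).execute();
    } finally {
      context.onExecutionPhaseCompleted();
    }
  }
}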

      // initialize
      searcher = getIndexAdapter().newSearcher();
      this.maxDoc = searcher.maxDoc();
      boolean bExecuteQuery = true;
      boolean bProcessHits = true;
      RequestContext reqContext = this.getIndexAdapter().getRequestContext();
      BooleanQuery rootQuery = new BooleanQuery();
      DiscoveryFilter discoveryFilter = discoveryQuery.getFilter();
      DiscoveryResult discoveryResult = discoveryQuery.getResult();
      Discoverables returnables = discoveryQuery.getReturnables();
      if ((returnables == null) || (returnables.size() == 0) ||
          (discoveryFilter.getMaxRecords() <= 0)) {
        bProcessHits = false;
      }
     
      // CSW query provider options
      boolean isDublinCoreResponse = true;
      boolean isBriefResponse = false;
      boolean isSummaryResponse = false;
      QueryOptions cswQueryOptions = (QueryOptions)reqContext.getObjectMap().get(
          "com.esri.gpt.server.csw.provider.components.QueryOptions");
     
      // build the query (if no query was supplied, we'll query everything)
      LogicalClauseAdapter logicalAdapter = new LogicalClauseAdapter(this);
      LogicalClause rootClause = discoveryFilter.getRootClause();
      if ((rootClause == null) || (rootClause.getClauses().size() == 0)) {
        if (discoveryFilter.getMaxRecords() <= QUERYALL_THRESHOLD) {
          LOGGER.finer("No filter was supplied, querying all...");
          logicalAdapter.appendSelectAll(rootQuery);
        } else {
          LOGGER.finer("No filter was supplied, query will not be executed.");
          bExecuteQuery = false;
        }
      } else {
        logicalAdapter.adaptLogicalClause(rootQuery,rootClause);
        if ((rootQuery.clauses() == null) || (rootQuery.clauses().size() == 0)) {
          // nothing was adapted into the root query, there is nothing to execute
          bExecuteQuery = false;
        }
      }
      if (!bExecuteQuery) return;
    
       
      // execute the query and process the hits if required
     
      // set the sort option
      Sort sortOption = null;
      if (bProcessHits && (searcher.maxDoc() > 0)) {
        sortOption = makeSortOption(discoveryQuery);
      }
     
      // filters
      Filter filter = null;
     
      // make the access control filter
      MetadataAcl acl = new MetadataAcl(reqContext);
      AuthenticationStatus auth = reqContext.getUser().getAuthenticationStatus();
      boolean bAdmin = auth.getAuthenticatedRoles().hasRole("gptAdministrator");
      if (!bAdmin && !acl.isPolicyUnrestricted()) {
        String[] aclValues = acl.makeUserAcl();
        filter = new AclFilter(Storeables.FIELD_ACL,aclValues);
      }
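
The snippet above reads the CSW QueryOptions out of reqContext.getObjectMap(), and the servlet example further below writes lucene.useRemoteWriter into the same map: the object map is a per-request key/value store for handing state between otherwise unrelated components. A minimal sketch, with a hypothetical key name:

import com.esri.gpt.framework.context.RequestContext;

public class ObjectMapUsage {

  // hypothetical key; real code tends to key on fully qualified class names
  private static final String KEY = "com.example.component.Options";

  /** Producer: stash per-request state for a downstream component. */
  public void stash(RequestContext context, Object options) {
    context.getObjectMap().put(KEY, options);
  }

  /** Consumer: read it back later in the same request; may be null. */
  public Object retrieve(RequestContext context) {
    return context.getObjectMap().get(KEY);
  }
}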

    super(dataProcessor, unit);
  }

  @Override
  public void execute() {
    RequestContext context = RequestContext.extract(null);

    boolean success = false;
    long count = 0;
    Result result = null;
    final ExecutionUnit unit = getExecutionUnit();
    LOGGER.log(Level.FINEST, "[SYNCHRONIZER] Starting pushing through unit: {0}", unit);
    if (isActive()) {
      getProcessor().onStart(getExecutionUnit());
    }

    ExecutionUnitHelper helper = new ExecutionUnitHelper(getExecutionUnit());
    // get report builder
    final ReportBuilder rp = helper.getReportBuilder();

    try {
      Protocol protocol = getExecutionUnit().getRepository().getProtocol();
      if (protocol instanceof HarvestProtocolAgs2Agp) {
        HarvestProtocolAgs2Agp ags2agp = (HarvestProtocolAgs2Agp)protocol;
        ArcGISInfo source = ags2agp.getSource();
        AgpDestination destination = ags2agp.getDestination();
       
        Ags2AgpCopy copy = new Ags2AgpCopy(source, destination){
          private long counter;
         
          @Override
          protected boolean syncItem(AgpItem sourceItem) throws Exception {
            counter++;
            String sourceUri = sourceItem.getProperties().getValue("id");
            try {
              boolean result = super.syncItem(sourceItem);
              rp.createEntry(sourceUri, result);
              LOGGER.log(Level.FINEST, "[SYNCHRONIZER] Pushed item #{0} of source URI: \"{1}\" through unit: {2}", new Object[]{counter, sourceUri, unit});
              return result;
            } catch (AgpException | HttpClientException ex) {
              // record the failure and skip this item rather than aborting the whole unit
              LOGGER.log(Level.WARNING, "[SYNCHRONIZER] Failed pushing item #{0} of source URI: \"{1}\" through unit: {2}. Reason: {3}", new Object[]{counter, sourceUri, unit, ex.getMessage()});
              rp.createUnpublishedEntry(sourceUri, Arrays.asList(ex.getMessage()));
              return false;
            }
          }

          @Override
          protected boolean doContinue() {
            boolean doContinue = Ags2AgpExecutor.this.isActive();
            if (!doContinue) {
              unit.setCleanupFlag(false);
            }
            return doContinue;
          }
        };
       
        copy.copy();
      }

      success = true;

      if (isActive()) {
        // save last sync date
        getExecutionUnit().getRepository().setLastSyncDate(rp.getStartTime());
        HrUpdateLastSyncDate updLastSyncDate = new HrUpdateLastSyncDate(context, unit.getRepository());
        updLastSyncDate.execute();
      }
    } catch (Exception ex) {
      rp.setException(ex);
      unit.setCleanupFlag(false);
      LOGGER.log(Level.FINEST, "[SYNCHRONIZER] Failed pushing through unit: {0}. Cause: {1}", new Object[]{unit, ex.getMessage()});
      getProcessor().onIterationException(getExecutionUnit(), ex);
    } finally {
      if (!isShutdown()) {
        getProcessor().onEnd(unit, success);
        context.onExecutionPhaseCompleted();
      }
      if (result != null) {
        result.destroy();
      }
      LOGGER.log(Level.FINEST, "[SYNCHRONIZER] Completed pushing through unit: {0}. Obtained {1} records.", new Object[]{unit, count});

            IntegrationContextFactory icf = new IntegrationContextFactory();
            if (icf.isIntegrationEnabled()) {
              IntegrationContext ic = icf.newIntegrationContext();
              if (ic != null) {
                String samlUsername = ic.getUsernameFromSAMLToken(samlToken);
                RequestContext rc = context.getRequestContext();
                if ((rc != null) && (samlUsername != null)) {
                  User user = rc.getUser();
                  user.reset();
                  user.setCredentials(new UsernameCredential(samlUsername));
                  rc.newIdentityAdapter().authenticate(user);
                  context.getRequestOptions().getTransactionOptions().setPublicationMethod("seditor");

                  expr = "//tcExt:tcSecurity/tcExt:tcSecuredAction[tcExt:status='draft']";
                  Node ndTmp = (Node)xpath.evaluate(expr,ndSoapEnv,XPathConstants.NODE);
                  if (ndTmp != null) {
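
The SAML fragment above contains the programmatic sign-in path in miniature: reset the user attached to the RequestContext, set new credentials, then authenticate through a freshly created identity adapter. A condensed sketch of just that step (import paths are assumed from the framework's usual layout, and the checked exceptions thrown by authenticate() are collapsed into Exception):

import com.esri.gpt.framework.context.RequestContext;
import com.esri.gpt.framework.security.credentials.UsernameCredential;
import com.esri.gpt.framework.security.principal.User;

public class ProgrammaticSignIn {

  /** Signs the request's user in under the supplied name. */
  public void signIn(RequestContext rc, String username) throws Exception {
    User user = rc.getUser();
    user.reset();                                       // drop any prior identity
    user.setCredentials(new UsernameCredential(username));
    rc.newIdentityAdapter().authenticate(user);         // resolves profile and roles
  }
}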

  @Override
  public void onStart(ExecutionUnit unit) {
    LOGGER.info("[SYNCHRONIZER] Starting processing metadata records through: " + unit);
    Date startTime = new Date();
    // create request context
    RequestContext context = RequestContext.extract(null);
    Long maxRepRecords = context.getApplicationConfiguration().getHarvesterConfiguration().getMaxRepRecords();
    if (maxRepRecords < 0) {
      maxRepRecords = null;
    }
    Long maxRepErrors = context.getApplicationConfiguration().getHarvesterConfiguration().getMaxRepErrors();
    if (maxRepErrors < 0) {
      maxRepErrors = null;
    }

    ExecutionUnitHelper helper = new ExecutionUnitHelper(unit);
   
    try {

      // initialize report builder
      Criteria criteria = unit.getCriteria();
      ReportBuilder rp = new ReportBuilder(
        criteria != null ? criteria.getMaxRecords() : null,
        maxRepRecords,
        maxRepErrors);
      rp.setStartTime(startTime);
      helper.setReportBuilder(rp);

      // prepare the publisher
      LocalDao localDao = new LocalDao(context);
      String uDN = localDao.readDN(unit.getRepository().getOwnerId());
      Publisher publisher = new Publisher(context, uDN);
      unit.setPublisher(publisher);

      // get all existing URI's for the specific harvesting site
      if (unit.getCleanupFlag()) {
        SourceUriArray sourceUris = new SourceUriArray(new String[]{"uri", "uuid"});
        helper.setSourceUris(sourceUris);
        collectExistingSourceURIs(context, unit.getRepository(), sourceUris);
      }

      // notify listeners
      listener.onHarvestStart(unit.getRepository());

    } catch (Exception ex) {
      LOGGER.log(Level.SEVERE, "[SYNCHRONIZER] Error starting metadata processing.", ex);
    } finally {
      context.onExecutionPhaseCompleted();
    }
  }
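
onStart() above reads its reporting limits from the application configuration hanging off the context, treating negative values as "unlimited". That normalization is easy to factor out; a small sketch:

import com.esri.gpt.framework.context.RequestContext;

public class HarvesterLimits {

  /** Reads the max-records reporting limit; negative means unlimited (null),
      mirroring the normalization in onStart() above. */
  public Long readMaxRepRecords(RequestContext context) {
    Long max = context.getApplicationConfiguration()
        .getHarvesterConfiguration().getMaxRepRecords();
    return (max != null && max < 0) ? null : max;
  }
}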

   * @param response the servlet response
   */
  @Override
  protected void doPost(HttpServletRequest request, HttpServletResponse response)
    throws ServletException, IOException {
    RequestContext context = null;
    try {
     
      LOGGER.finer("Query string="+request.getQueryString());
      String sEncoding = request.getCharacterEncoding();
      if ((sEncoding == null) || (sEncoding.trim().length() == 0)) {
        request.setCharacterEncoding("UTF-8");
      }
      context = RequestContext.extract(request);
      String action = Val.chkStr(request.getParameter("action"));
     
      if (action.equalsIgnoreCase("isSynchronizerRunning")) {
        boolean isRunning = LuceneIndexSynchronizer.RUNNING;
        this.writeCharacterResponse(response,
            ""+isRunning,"UTF-8","text/plain; charset=UTF-8");
        return;
      }
     
      context.getObjectMap().put("lucene.useRemoteWriter",false);
      StringAttributeMap params = context.getCatalogConfiguration().getParameters();
      String param = Val.chkStr(params.getValue("lucene.useSingleSearcher"));
      boolean useSingleWriter = param.equalsIgnoreCase("true");
      param = Val.chkStr(params.getValue("lucene.useLocalWriter"));
      boolean bUseLocalWriter = !param.equalsIgnoreCase("false");
     
      param = Val.chkStr(params.getValue("lucene.useRemoteWriter"));
      boolean useRemoteWriter = param.equalsIgnoreCase("true");
      String remoteWriterUrl = Val.chkStr(params.getValue("lucene.remoteWriterUrl"));

      boolean bOk = true;
      if (!useSingleWriter || !bUseLocalWriter) {
        bOk = false;
        String s = "Inconsistent configuration parameters:"+
          " lucene.useSingleSearcher, lucene.useLocalWriter";
        LOGGER.severe(s);
        response.sendError(500,"Inconsistent configuration parameters on server.");
      }
     
      if (bOk) {
        String sIds = Val.chkStr(request.getParameter("ids"));
        String[] ids = sIds.split(",");
       
        if (action.equalsIgnoreCase("delete")) {
          this.executeDelete(request,response,context,ids);
       
        } else if (action.equalsIgnoreCase("publish")) {
          this.executePublish(request,response,context,ids);
         
        } else if (action.equalsIgnoreCase("runSynchronizer")) {
          StringAttributeMap syncParams = new StringAttributeMap();
          //syncParams.set("feedbackSeconds","30");
          LuceneIndexSynchronizer lis = new LuceneIndexSynchronizer(syncParams);
          lis.syncronize();
         
        } else if (action.equalsIgnoreCase("touch")) {
          LuceneIndexAdapter adapter = new LuceneIndexAdapter(context);
          adapter.touch();

        } else {
          String s = "Unrecognized action: "+action;
          LOGGER.log(Level.WARNING,s);
        }
      }
     
    } catch (Throwable t) {
      String sErr = "Exception occurred while processing servlet request.";
      LOGGER.log(Level.SEVERE,sErr,t);
      response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
     
    } finally {
      if (context != null) context.onExecutionPhaseCompleted();
    }
  }
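
doPost() above pulls its switches from the catalog configuration's parameter map (gpt.xml), using Val.chkStr to normalize missing values to an empty string before comparing. The same pattern, for a hypothetical parameter name:

import com.esri.gpt.framework.collection.StringAttributeMap;
import com.esri.gpt.framework.context.RequestContext;
import com.esri.gpt.framework.util.Val;

public class CatalogParams {

  /** Reads a boolean switch from the catalog parameters;
      "my.feature.enabled" is a hypothetical parameter name. */
  public boolean isFeatureEnabled(RequestContext context) {
    StringAttributeMap params = context.getCatalogConfiguration().getParameters();
    String value = Val.chkStr(params.getValue("my.feature.enabled"));
    return value.equalsIgnoreCase("true");
  }
}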
