Package org.elasticsearch.client.action.search

Examples of org.elasticsearch.client.action.search.SearchRequestBuilder
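
Before the project excerpts, here is a minimal, self-contained sketch of typical SearchRequestBuilder usage. It is not taken from any of the excerpts below: the index name "my_index" and the field/value pair are placeholders, and depending on the Elasticsearch version the builder class lives in org.elasticsearch.client.action.search or, in later releases, org.elasticsearch.action.search.

    import org.elasticsearch.action.search.SearchRequestBuilder;
    import org.elasticsearch.action.search.SearchResponse;
    import org.elasticsearch.client.Client;
    import org.elasticsearch.index.query.QueryBuilders;
    import org.elasticsearch.search.SearchHit;
    import org.elasticsearch.search.sort.SortOrder;

    public class SearchRequestBuilderExample {
        // Run a simple term query sorted by score and print the first 10 hits.
        public static void search(Client client) {
            SearchRequestBuilder builder = client.prepareSearch("my_index")      // placeholder index name
                    .setQuery(QueryBuilders.termQuery("field", "value"))         // placeholder field/value
                    .addSort("_score", SortOrder.DESC)
                    .setFrom(0)
                    .setSize(10);
            SearchResponse response = builder.execute().actionGet();
            for (SearchHit hit : response.getHits()) {
                System.out.println(hit.getId() + " score=" + hit.getScore());
            }
        }
    }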


                    closed = true;
                    return;
                }
                try {
                    String indexName = nextIndex();
                    SearchRequestBuilder builder = client.client().prepareSearch(indexName);
                    if (ThreadLocalRandom.current().nextBoolean()) {
                        builder.addSort("num", SortOrder.DESC);
                    } else if (ThreadLocalRandom.current().nextBoolean()) {
                        // add a _score based sort; this is the default ordering anyway, it just exercises the code path
                        builder.addSort("_score", SortOrder.DESC);
                    }
                    if (ThreadLocalRandom.current().nextBoolean()) {
                        builder.setSearchType(SearchType.DFS_QUERY_THEN_FETCH);
                    }
                    // bounded random size in [0, numberOfHits); avoids the Math.abs(Integer.MIN_VALUE) edge case
                    int size = ThreadLocalRandom.current().nextInt(numberOfHits);
                    builder.setSize(size);
                    if (ThreadLocalRandom.current().nextBoolean()) {
                        // update from
                        builder.setFrom(size / 2);
                    }
                    String value = nextFieldValue();
                    builder.setQuery(termQuery("field", value));
                    searchCounter.incrementAndGet();
                    SearchResponse searchResponse = builder.execute().actionGet();
                    if (searchResponse.failedShards() > 0) {
                        logger.warn("failed search " + Arrays.toString(searchResponse.shardFailures()));
                    }
                    // verify that all come from the requested index
                    for (SearchHit hit : searchResponse.hits()) {
View Full Code Here


    @Override public MultiGetRequestBuilder prepareMultiGet() {
        return new MultiGetRequestBuilder(this);
    }

    @Override public SearchRequestBuilder prepareSearch(String... indices) {
        return new SearchRequestBuilder(this).setIndices(indices);
    }
View Full Code Here

        return QueryBuilders.queryString(query).defaultOperator(QueryStringQueryBuilder.Operator.AND).defaultField("_all");
    }
   
    @SuppressWarnings("PMD.NcssMethodCount")
    SearchResponse startScroll() {
        SearchRequestBuilder searchRequestBuilder = client.prepareSearch(indexName);
        searchRequestBuilder.setSearchType(SearchType.SCAN);
        searchRequestBuilder.setQuery(createQuery());
        searchRequestBuilder.setSize(BATCH_SIZE);
        searchRequestBuilder.setExplain(false);
        searchRequestBuilder.setNoFields();
        searchRequestBuilder.setVersion(true);
        searchRequestBuilder.setScroll(TimeValue.timeValueMinutes(SCROLL_TIME_IN_MINUTES));

        return searchRequestBuilder.execute().actionGet();
    }
View Full Code Here
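
The startScroll() helper above only opens the SCAN scroll; with SearchType.SCAN the initial response carries no hits, and results only arrive once the scroll id is passed back. Below is a hedged sketch (not from the original source) of how such a scroll is typically drained, reusing the client, SCROLL_TIME_IN_MINUTES and startScroll() names from the snippet plus a hypothetical process() callback.

    SearchResponse response = startScroll();
    while (true) {
        response = client.prepareSearchScroll(response.getScrollId())
                .setScroll(TimeValue.timeValueMinutes(SCROLL_TIME_IN_MINUTES))
                .execute().actionGet();
        SearchHit[] hits = response.getHits().getHits();
        if (hits.length == 0) {
            break; // scroll exhausted
        }
        for (SearchHit hit : hits) {
            // setNoFields()/setVersion(true) above means only the id and version are available here
            process(hit.getId(), hit.getVersion());   // process() is a hypothetical callback
        }
    }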

      int docsPerScroll = 500; // (note this is multiplied by the number of primary shards, will normally be 5 per community)     
      if (maxDocs < docsPerScroll) {
        docsPerScroll = maxDocs; // (can't do any better than this because we don't know what the distribution across shards will be)
      }
     
      SearchRequestBuilder searchOptions = queryInfo.indexMgr.getSearchOptions();
      searchOptions.setSize(docsPerScroll);
      searchOptions.setSearchType(SearchType.SCAN);
      searchOptions.setScroll("1m");
     
      SearchResponse rsp = queryInfo.indexMgr.doQuery(queryInfo.queryObj, searchOptions);
      String scrollId = rsp.getScrollId();
     
      final int SPLIT_SIZE = 4000;
View Full Code Here

        }
        sb.append("/document_index");
       
        ElasticSearchManager esm = ElasticSearchManager.getIndex(sb.toString());
               
        SearchRequestBuilder searchOptions = esm.getSearchOptions();
        BoolQueryBuilder boolQuery = QueryBuilders.boolQuery();
        boolQuery.must(QueryBuilders.rangeQuery(DocumentPojo.created_).from(cleanseStartTime));
        boolQuery.must(QueryBuilders.termQuery(DocumentPojo.sourceKey_, sp.getKey() ));
        searchOptions.setSize(200); // (note this is multiplied by the number of primary shards)
        searchOptions.setSearchType(SearchType.SCAN);
        searchOptions.setScroll("10m");
        SearchResponse rsp = esm.doQuery(boolQuery, searchOptions);
        String scrollId = rsp.getScrollId();
        int nSkip = 0;
       
        for (;;) // Until no more hits
        {
          rsp = esm.doScrollingQuery(scrollId, "10m");
          SearchHit[] docs = rsp.getHits().getHits();
          scrollId = rsp.getScrollId();
         
          if ((null == docs) || (0 == docs.length)) {
            break;
          }         
          if (docs.length > 100) { // only log progress for large checks
            logger.info("Checking ES docs for large source=" + sp.getKey() + " source: " + source_index + "/" + source_count + " from " + nSkip + " to " + (nSkip+docs.length) );
          }
         
          // Check all ES docs against MongoDB
         
          for (SearchHit hit: docs)
          {
            String idStr = hit.getId();
            boolean found = true; //(fail closed!)
            if (null == dbCache) {
              //OBSOLETED, USE DBCACHE INSTEAD (WHERE AVAILABLE):
              ObjectId id = new ObjectId(idStr);
              BasicDBObject query = new BasicDBObject(DocumentPojo._id_, id);
              query.put(DocumentPojo.sourceKey_, sp.getKey()); // (ensures uses only the right shard)
              DBObject dbo = documentDb.findOne(query, queryFields);
              found = (dbo != null);
            }//TESTED
            else {
              found = dbCache.contains(idStr);
            }//TESTED
            if (!found)
            {       
              ObjectId id = new ObjectId(idStr);
              DocumentPojo doc = new DocumentPojo();
              doc.setId(id);
              doc.setIndex(hit.getIndex() + "/document_index");
              docs_to_remove.add(doc);
              logger.info("db sync removing doc: " + id + "/" + hit.getIndex() + "/" + source_index + " not found in mongo");
              fixcount++;
            } // end if not found
          } // end loop over docs to check
         
          nSkip += docs.length;
        }// until no more hits
        if (!docs_to_remove.isEmpty()) {
          storeManager.removeFromSearch(docs_to_remove);
          docs_to_remove.clear();
        }
       
        //CHECK OLD FEEDS 10 at a time
        int iteration = 1;
        boolean removedAll = true;
        while (removedAll )
        {
          int rows = iteration*iteration*10;//exponential scaling 10x^2
          iteration++;
          int oldfixes = 0;
         
          //get old docs from es
          SearchRequestBuilder searchOptionsOLD = esm.getSearchOptions();
          BoolQueryBuilder boolQueryOLD = QueryBuilders.boolQuery();
          boolQueryOLD.must(QueryBuilders.rangeQuery(DocumentPojo.created_).from(cleanseStartTime));
          boolQueryOLD.must(QueryBuilders.termQuery(DocumentPojo.sourceKey_, sp.getKey()));
          searchOptionsOLD.addSort(DocumentPojo.created_, SortOrder.ASC);
          searchOptionsOLD.setSize(rows);
          SearchResponse rspOLD = esm.doQuery(boolQueryOLD, searchOptionsOLD);
          SearchHit[] docsOLD = rspOLD.getHits().getHits();
         
          // Check all ES docs against MongoDB
         
View Full Code Here

    }//TESTED     
    escapedterm = sb.toString();     

    // Create the search query

    SearchRequestBuilder searchOptions = gazIndex.getSearchOptions();
    BaseQueryBuilder queryObj1 = QueryBuilders.queryString(escapedterm).defaultField(EntityFeaturePojoIndexMap.Mapping.RootObject.RootProperties.alias_pri_);

    String[] communityIdStrs = SocialUtils.getCommunityIds(userIdStr, communityIdStrList);
    BaseQueryBuilder queryObj2 = QueryBuilders.boolQuery().should(QueryBuilders.termsQuery(EntityFeaturePojo.communityId_, communityIdStrs));

    BaseQueryBuilder queryObj = QueryBuilders.boolQuery().must(queryObj1).must(queryObj2);

    searchOptions.addSort(EntityFeaturePojo.doccount_, SortOrder.DESC);
    searchOptions.addFields(EntityFeaturePojo.disambiguated_name_, EntityFeaturePojo.doccount_,
        EntityFeaturePojo.type_, EntityFeaturePojo.dimension_);
    if (bIncludeGeo) {
      searchOptions.addFields(EntityFeaturePojo.geotag_);
      searchOptions.addFields(EntityFeaturePojo.ontology_type_);
    }
    if (bIncludeLinkdata) {
      searchOptions.addFields(EntityFeaturePojo.linkdata_);     
    }

    // Initial alias handling:

    AliasLookupTable aliasTable = null;
    HashMap<String, SearchSuggestPojo> aliasResults = null;
    if (!bWantNoAlias) {
      AliasManager aliasManager = AliasManager.getAliasManager();
      if (null != aliasManager) {
        aliasTable = aliasManager.getAliasLookupTable(communityIdStrList, communityIdStrs, null, userIdStr);
      }
    }
    //TESTED

    // Also create an internal Lucene index for aliases, in case any of them do not have actual entities representing them
    List<EntityFeaturePojo> extraEntries = null;
    if (null != aliasTable) {
      extraEntries = checkAliasMasters(aliasTable, escapedterm);
    }
    // (end initial alias handling)

    int nDesiredSize = 20;
    if (null == aliasTable) {   
      searchOptions.setSize(nDesiredSize); // will forward all 20
    }
    else {
      searchOptions.addFields(EntityFeaturePojo.index_);
      searchOptions.setSize(3*nDesiredSize); // will forward top 20 after de-aliasing

      aliasResults = new HashMap<String, SearchSuggestPojo>();
      // (We use this to ensure we only include each entity once after aliasing)
    }
    //TESTED
View Full Code Here

          aliasTable = aliasManager.getAliasLookupTable(communityIdStrList, communityIdStrs, null, userIdStr);
        }
      }//TESTED                   

      ElasticSearchManager esm = ElasticSearchManager.getIndex(assocIndex_);
      SearchRequestBuilder searchOptions = esm.getSearchOptions();
      BoolQueryBuilder boolQuery = QueryBuilders.boolQuery();
      boolean bExtraQueryTerms = false;
      String term = "";
      if ( !ent1.equals("null") )
      {
        if ( field.equals(AssociationFeaturePojo.entity1_) )
          term = ent1;
        else {
          bExtraQueryTerms = true;
          EntityFeaturePojo alias = null;
          if (null != aliasTable) {
            alias = aliasTable.getAliasMaster(ent1);
          }
          if (null != alias) { // Found!
            boolQuery.must(QueryBuilders.termsQuery(AssociationFeaturePojo.entity1_index_, alias.getAlias().toArray()));
          }
          else {
            boolQuery.must(QueryBuilders.termQuery(AssociationFeaturePojo.entity1_index_, ent1));
          }//TESTED
        }
      }
      if ( !verb.equals("null") )
      {
        if ( field.equals(AssociationFeaturePojo.verb_) )
          term = verb;
        else
        {
          bExtraQueryTerms = true;
          boolQuery.must(QueryBuilders.queryString(new StringBuffer("+").append(verb.replaceAll("\\s+", " +")).toString()).
              defaultField(AssociationFeaturePojo.verb_));
        }
      }
      if ( !ent2.equals("null") )
      {
        if ( field.equals(AssociationFeaturePojo.entity2_) )
          term = ent2;
        else {
          bExtraQueryTerms = true;
          EntityFeaturePojo alias = null;
          if (null != aliasTable) {
            alias = aliasTable.getAliasMaster(ent2);
          }
          if (null != alias) { // Found!
            boolQuery.must(QueryBuilders.termsQuery(AssociationFeaturePojo.entity2_index_, alias.getAlias().toArray()));
          }
          else {
            boolQuery.must(QueryBuilders.termQuery(AssociationFeaturePojo.entity2_index_, ent2));
          }
        }//TESTED (cut and paste from entity1)
      }


      String escapedterm = null;
      StandardTokenizer st = new StandardTokenizer(Version.LUCENE_30, new StringReader(ContentUtils.stripDiacritics(term)));
      CharTermAttribute termAtt = st.addAttribute(CharTermAttribute.class);
      StringBuffer sb = new StringBuffer();
      try {
        try {
          st.reset();
          while (st.incrementToken()) {
            if (sb.length() > 0) {
              sb.append(" +");
            }
            else {
              sb.append('+');           
            }
            sb.append(luceneEncodeTerm(termAtt.toString()));
          }
        }
        finally {
          st.close();
        }
      } catch (IOException e) {
        e.printStackTrace();
      }     
      if (!term.endsWith(" ") || (0 == sb.length())) { // Could be in the middle of typing, stick a * on the end
        sb.append('*');
      }//TESTED     

      escapedterm = sb.toString();
     
      // Also create an internal Lucene index for aliases, in case any of them do not have actual entities representing them
      List<EntityFeaturePojo> extraEntries = null;
      BoolQueryBuilder extraQueryTerms = null;
      if (field.startsWith("entity")) {
        String indexField = field.startsWith("entity1") ? "entity1_index" : "entity2_index";
        if (null != aliasTable) {
          extraEntries = checkAliasMasters(aliasTable, escapedterm);
        }
        if (null != extraEntries) {
          extraQueryTerms = QueryBuilders.boolQuery();
          int nExtraTerms = 0;
          Iterator<EntityFeaturePojo> aliasIt = extraEntries.iterator();
          while (aliasIt.hasNext()) {
            EntityFeaturePojo alias = aliasIt.next();           
            nExtraTerms += alias.getAlias().size();
           
            if (!bExtraQueryTerms && (nExtraTerms > 20)) { // If not filtering on event type we'll be more aggressive
              break;
            }//TESTED
            if (bExtraQueryTerms && (nExtraTerms > 60)) { // If the number of terms gets too large bail anyway
              break;
            }//TESTED
           
            extraQueryTerms.should(QueryBuilders.termsQuery(indexField, alias.getAlias().toArray()));
            aliasIt.remove();
           
          }//end loop over entities
        }//if found new aliases
       
      }//(if this is an entity lookup) TESTED - including breaking out because of # of terms
     
      // (end initial alias handling)
     
      if (null == extraQueryTerms) {
        boolQuery.must(QueryBuilders.queryString(escapedterm).defaultField(field));
      }
      else { // (in this case combine the escaped term with the aliases)
        extraQueryTerms.should(QueryBuilders.queryString(escapedterm).defaultField(field));
        boolQuery.must(extraQueryTerms);
      }//TESTED
      boolQuery.must(QueryBuilders.termsQuery(AssociationFeaturePojo.communityId_, communityIdStrs));

      searchOptions.addSort(AssociationFeaturePojo.doccount_, SortOrder.DESC);

      // Work out which fields to return:
      //TODO (INF-1234) need to work out what to do with quotations and similar here (ie entityX without entityX_index)
      String returnfield;
      boolean bReturningEntities = true;
      if ( field.equals(AssociationFeaturePojo.entity1_) ) {
        returnfield = AssociationFeaturePojo.entity1_index_;
        searchOptions.addFields( AssociationFeaturePojo.entity1_index_, AssociationFeaturePojo.doccount_);
      }
      else if ( field.equals(AssociationFeaturePojo.entity2_)) {
        returnfield = AssociationFeaturePojo.entity2_index_;
        searchOptions.addFields( AssociationFeaturePojo.entity2_index_, AssociationFeaturePojo.doccount_);
      }
      else {
        bReturningEntities = false;
        returnfield = AssociationFeaturePojo.verb_;
        searchOptions.addFields( AssociationFeaturePojo.verb_, AssociationFeaturePojo.verb_category_,  AssociationFeaturePojo.doccount_);
      }

      int nNumSuggestionsToReturn = 20;
      if (bReturningEntities && (null != aliasTable)) {
        searchOptions.setSize(3*nNumSuggestionsToReturn); // we're going to remove some duplicates so get more than we need
      }
      else { // normal case
        searchOptions.setSize(nNumSuggestionsToReturn);
      }

      SearchResponse rsp = esm.doQuery(boolQuery, searchOptions);
      SearchHit[] docs = rsp.getHits().getHits();

View Full Code Here
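
The snippet above requests specific stored fields via addFields() and then pulls SearchHit[] docs off the response. Below is a hedged sketch of reading those field values back: the SearchHitField accessors are standard for this client API, but the helper itself (and treating returnfield as single-valued) is an assumption, not code from the original project.

    import org.elasticsearch.search.SearchHit;
    import org.elasticsearch.search.SearchHitField;

    // Hypothetical helper: read the suggestion field chosen above (returnfield) plus doccount off each hit.
    final class SuggestionHitReader {
        static void readHits(SearchHit[] docs, String returnField, String docCountField) {
            for (SearchHit hit : docs) {
                SearchHitField value = hit.field(returnField);
                if (value == null) {
                    continue; // field not present on this hit
                }
                SearchHitField count = hit.field(docCountField);
                String suggestion = value.getValue();                 // assumed single-valued
                long docCount = (count == null) ? 0L : ((Number) count.getValue()).longValue();
                System.out.println(suggestion + " (" + docCount + ")");
            }
        }
    }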

   
    //(timing)
    long nQuerySetupTime = System.currentTimeMillis();
 
    ElasticSearchManager indexMgr = getIndexManager(communityIdStrs);
    SearchRequestBuilder searchSettings = indexMgr.getSearchOptions();

    StringBuffer querySummary = new StringBuffer();
    BaseQueryBuilder queryObj = null;
    InternalTempFilterInfo tempFilterInfo = null;
    try {
      queryObj = getBaseQuery(query, communityIdStrs, communityIdStrList, userIdStr, querySummary);
      if (null == queryObj) { // only occurs if has 1 element with ftext starting $cache:
        return getSavedQueryInstead(query.qt.get(0).ftext.substring(7), communityIdStrs, query); // (step over cache preamble)
      }
      tempFilterInfo = getBaseFilter(query, communityIdStrs);
    }
    catch (Exception e) {
      Globals.populateStackTrace(errorString, e);
      if (null != e.getCause()) {
        errorString.append("[CAUSE=").append(e.getCause().getMessage()).append("]");
        Globals.populateStackTrace(errorString, e.getCause());       
      }
      errorString.append(": " + e.getMessage());
      return null;
    }
   
    //DEBUG
    //querySummary.append(new Gson().toJson(query, AdvancedQueryPojo.class));
   
  // 0.4] Pre-Lucene Scoring
   
    // 0.4.1] General
   
    // Different options:
    //   a] Get the most recent N documents matching the query, score post-query
    //   b] Get the N highest (Lucene) scoring documents, incorporate significance post-query if desired
    // In both cases, N depends on whether significance calculation is taking place (and on the "skip" param)
   
    int nRecordsToOutput = query.output.docs.numReturn;
    int nRecordsToSkip = query.output.docs.skip;
    int nRecordsToGet = query.score.numAnalyze;

    final int nMAXRECORDSTOOUTPUT = 10000;
    final int nMAXRECORDSTOGET = 20000;
   
    // Some sanity checking on doc numbers:
    if (nRecordsToOutput > nMAXRECORDSTOOUTPUT) { // Upper limit...
      errorString.append(": Max # docs to return is 10000.");
      return null;
    }
    if (nRecordsToGet < nRecordsToOutput) {
      nRecordsToGet = nRecordsToOutput;
    }
    else if (nRecordsToGet > nMAXRECORDSTOGET) { // Upper limit...
      nRecordsToGet = nMAXRECORDSTOGET; // (we can do something sensible with this so carry on regardless)
    }
   
    boolean bUseSignificance = (query.score.sigWeight > 0.0);
    boolean bNeedExtraResultsForEnts =
      ((query.output.aggregation != null) && (query.output.aggregation.entsNumReturn != null) && (query.output.aggregation.entsNumReturn > 0))
        ||
      (query.output.docs.enable && (query.output.docs.numReturn > 0) && (query.output.docs.ents) && (query.score.scoreEnts));
   
    if (bUseSignificance || bNeedExtraResultsForEnts) {
     
      // Some logic taken from the original "knowledge/search"
      while ( (nRecordsToSkip + nRecordsToOutput > nRecordsToGet) && (nRecordsToGet <= nMAXRECORDSTOGET) )
      {
        nRecordsToGet += nRecordsToGet;
      }
      if (nRecordsToGet > nMAXRECORDSTOGET) {
        errorString.append(": Can only skip through to 20000 documents.");       
        return null;
      }
      searchSettings.setSize(nRecordsToGet);
     
      //TESTED
    }
    else if (query.output.docs.enable) { // In this case we just need the minimum number of records
      // (whether searching by date or by relevance)
      searchSettings.setFrom(nRecordsToSkip);
      nRecordsToSkip = 0; // (so it isn't double counted in the processing module)
      nRecordsToGet = nRecordsToOutput;
      searchSettings.setSize(nRecordsToGet);
      //TESTED
    }
    else { // In this case we're just outputting aggregations, and not even ones that come from the docs
      nRecordsToGet = 0; // (use this variable everywhere where we care about bring docs back either to output or for suitable aggregation)
      searchSettings.setSize(0);
    }
   
    // Sort on score if relevance is being used   
   
    if (nRecordsToGet > 0) {
      if (query.score.relWeight > 0.0) { // (b) above
        // Using score is default, nothing to do
      }
      else { // (a) above
        // Debug code, if rel weight negative then use date to check Lucene score is better...
        if (query.score.relWeight < 0.0) {
          query.score.relWeight = -query.score.relWeight;
        }
        // Set Lucene to order:
        searchSettings.addSort(DocumentPojo.publishedDate_, SortOrder.DESC);
      }//TOTEST
    }//(if docs aren't enabled, don't need to worry about sorting)
   
    // 0.4.2] Prox scoring (needs to happen after [0.3])

    // Add proximity scoring:
    boolean bLowAccuracyDecay = false;
    if ((nRecordsToGet > 0) || (null == _scoringParams.adjustAggregateSig) || _scoringParams.adjustAggregateSig) {
      // (ie if we're getting docs or applying scores to entities)
     
      if (!_aggregationAccuracy.equals("full")) {
        bLowAccuracyDecay = true;
      }
      queryObj = addProximityBasedScoring(queryObj, searchSettings, query.score, tempFilterInfo.parentFilterObj, bLowAccuracyDecay);
     
      if (null == _scoringParams.adjustAggregateSig) { // auto-decide .. if ftext is set and is non-trivial
        if ((null != query.score.timeProx) || (null != query.score.geoProx)) {
            // (These are set to null above if badly formed)
          _scoringParams.adjustAggregateSig = true;         
        }
      }
     
    }// (else not worth the effort) 
   
    // 0.4.3] Source weightings (if any)
   
    queryObj = applyManualWeights(queryObj, query.score);
   
  // 0.5] Pre-lucene output options
   
    // only return the id field and score
    // (Both _id and score come back as default options, SearchHit:: getId and getScore, don't need anything else)

    // Facets
   
    // (These are needed for the case where we need to perform aggregations manually)
    Integer manualEntsNumReturn = null;
    Integer manualEventsNumReturn = null;
    Integer manualFactsNumReturn = null;
    Integer manualGeoNumReturn = null;
   
    //DEBUG
    //System.out.println(new Gson().toJson(query.output.aggregation));
   
    if ((null != query.output.aggregation) && (null != query.output.aggregation.raw)) { // Like query, specify raw aggregation (Facets)
      // Gross raw handling for facets
      if ((null != query.raw) && (null != query.raw.query)) {
        // Don't currently support raw query and raw facets because I can't work out how to apply
        // the override on group/source!
        errorString.append(": Not currently allowed raw query and raw facets");
        return null;
      }
      else { // Normal code
        searchSettings.setFacets(query.output.aggregation.raw.getBytes());
      }
    }
    else { // Apply various aggregation (=="facet") outputs to searchSettings
      boolean bSpecialCase = (null != query.raw) && (null != query.raw.query);
     
      if (!_aggregationAccuracy.equals("full")) {
        if (null != query.output.aggregation) {
          if (_aggregationAccuracy.equals("low")) {
            manualEntsNumReturn = query.output.aggregation.entsNumReturn;
            manualEventsNumReturn = query.output.aggregation.eventsNumReturn;
            manualFactsNumReturn = query.output.aggregation.factsNumReturn;
            manualGeoNumReturn = query.output.aggregation.geoNumReturn;
          }                   
          query.output.aggregation.entsNumReturn = null;
          query.output.aggregation.eventsNumReturn = null;
          query.output.aggregation.factsNumReturn = null;
          query.output.aggregation.geoNumReturn = null;
          // (allow time aggregation)
          // (allow source aggregation)
        }
      }
      AggregationUtils.parseOutputAggregation(query.output.aggregation, _aliasLookup,
                            tempFilterInfo.entityTypeFilterStrings, tempFilterInfo.assocVerbFilterStrings,
                              searchSettings, bSpecialCase?tempFilterInfo.parentFilterObj:null);

      // In partial accuracy case, restore aggregation
      if (null != manualEntsNumReturn) {
        query.output.aggregation.entsNumReturn = manualEntsNumReturn;
      }
      if (null != manualEventsNumReturn) {
        query.output.aggregation.eventsNumReturn = manualEventsNumReturn;
      }
      if (null != manualFactsNumReturn) {
        query.output.aggregation.factsNumReturn = manualFactsNumReturn;
      }
      if (null != manualGeoNumReturn) {
        query.output.aggregation.geoNumReturn = manualGeoNumReturn;
      }
      //TESTED
    }
    //TESTED x2     
   
    //(timing)
    nQuerySetupTime = System.currentTimeMillis() - nQuerySetupTime;
   
  // 0.6] Perform Lucene query
   
    // 0.6.1: query extensions: pre-query hook
    ArrayList<IQueryExtension> queryExtensions = null;
    if (null != _queryExtensions) {
      queryId = new ObjectId();
      queryExtensions = new ArrayList<IQueryExtension>(_queryExtensions.size());
      for (Class<IQueryExtension> queryExtensionClass: _queryExtensions) {
        // Don't catch any exceptions thrown here - let it bubble upwards
        IQueryExtension queryExtension = queryExtensionClass.newInstance();
        queryExtension.preQueryActivities(queryId, query, communityIdStrs);
        queryExtensions.add(queryExtension);
      }
    }//TESTED (see test.QueryExtensionsTestCode)
   
    // Built-in federated query engine ...
    if (null != _federatedQueryCache) {
      // 2 modes:
      // 1) If srcInclude is true(default) then check each source vs the table
      // 2) If srcInclude is false, or no sources specified, then check each community vs the table
     
      // 1:
      if ((null != query.input) && (null != query.input.sources) && ((null == query.input.srcInclude) || query.input.srcInclude))
      {
        for (String srcKey: query.input.sources) {
          FederatedQueryInMemoryCache fedQueryCacheEl = _federatedQueryCache.get(srcKey);
          if (null != fedQueryCacheEl) {
            if (null == this._builtInFederatedQueryEngine) {
              _builtInFederatedQueryEngine = new SimpleFederatedQueryEngine();
            }
            _builtInFederatedQueryEngine.addEndpoint(fedQueryCacheEl.source);
          }
        }
      }//TESTED (http://localhost:8184/knowledge/document/query/53ab42a2e4b04bcfe2de4387?qt[0].entity=%22garyhart.com/externaldomain%22&output.docs.numReturn=10&input.sources=inf...federated.externaldomain.&input.srcInclude=true)
     
      // 2:
      else { //Get federated queries from communities
        HashSet<String> excludeSrcs = null;
        for (String commIdStr: communityIdStrs) {
          FederatedQueryInMemoryCache fedQueryCacheEl = _federatedQueryCache.get(commIdStr);
          if (null != fedQueryCacheEl) {
           
            if ((null != query.input) && (null != query.input.sources)) { // (there are exclude sources)
              if (null == excludeSrcs) {
                excludeSrcs = new HashSet<String>(query.input.sources);
              }
            }//TESTED (http://localhost:8184/knowledge/document/query/53ab42a2e4b04bcfe2de4387?qt[0].entity=%22garyhart.com/externaldomain%22&output.docs.numReturn=10&input.sources=inf...federated.externaldomain.&input.srcInclude=false)
           
            for (Map.Entry<String, SourceFederatedQueryConfigPojo> fedQueryKV: fedQueryCacheEl.sources.entrySet()) {
              if ((null == excludeSrcs) || !excludeSrcs.contains(fedQueryKV.getKey())) {
                if (null == this._builtInFederatedQueryEngine) {
                  _builtInFederatedQueryEngine = new SimpleFederatedQueryEngine();
                }
                _builtInFederatedQueryEngine.addEndpoint(fedQueryKV.getValue());
              }
            }
          }
        }//TESTED (by hand)
      }
      if (null != _builtInFederatedQueryEngine) {
        _builtInFederatedQueryEngine.preQueryActivities(queryId, query, communityIdStrs);
      }
    }   
   
    // 0.6.2: the main query
    if ((null != query.explain) && query.explain) { // (for diagnostic - will return lucene explanation)
      searchSettings.setExplain(true);
    }
   
    SearchResponse queryResults = null;

    // (_source can now be enabled, so this is necessary to avoid returning it)
    searchSettings.addFields();
    if ((null != query.raw) && (null != query.raw.query))
    {
      // (Can bypass all other settings)       
      searchSettings.setQuery(query.raw.query);
      queryResults = indexMgr.doQuery(null, tempFilterInfo.parentFilterObj, searchSettings);
    }//TESTED '{ "raw": { "match_all": {} } }'
    else
    {
      // Where I can, use the source filter as part of the query so that
View Full Code Here

    public String getIndexType() {
        return "tweet";
    }

    SearchResponse query(MyQuery query) {
        SearchRequestBuilder builder = client.prepareSearch(getIndexName());
        // merge results from shards:
        builder.setSearchType(SearchType.QUERY_THEN_FETCH);
        return query.initRequestBuilder(builder).execute().actionGet();
    }
View Full Code Here
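
MyQuery above is left to configure the builder itself via initRequestBuilder(). A hypothetical implementation sketch follows; the MyQuery interface shape (a single initRequestBuilder method returning the builder) is inferred from the call above, and the "user" field and term are placeholders.

    import org.elasticsearch.action.search.SearchRequestBuilder;
    import org.elasticsearch.index.query.QueryBuilders;

    // Hypothetical MyQuery implementation; the interface and its method shape are assumptions.
    class UserTweetQuery implements MyQuery {
        private final String userName;

        UserTweetQuery(String userName) {
            this.userName = userName;
        }

        @Override
        public SearchRequestBuilder initRequestBuilder(SearchRequestBuilder builder) {
            return builder.setQuery(QueryBuilders.termQuery("user", userName))   // placeholder field name
                          .setSize(20);
        }
    }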

//    return _superClient.preparePercolate(arg0, arg1);
//  }

  //@Override
  public org.elasticsearch.client.action.search.SearchRequestBuilder prepareSearch(String... arg0) {
    return new SearchRequestBuilder(_superClient.prepareSearch(arg0));
  }
View Full Code Here
