Package: org.apache.accumulo.examples.wikisearch.parser

Examples of org.apache.accumulo.examples.wikisearch.parser.QueryParser


     
      // Step 1: Parse the (already-validated) query string with the JEXL-based QueryParser.
      // NOTE(review): qp.execute can throw a JEXL ParseException — presumably handled by the
      // enclosing method's signature or a surrounding try block not visible here; confirm.
      if (log.isDebugEnabled()) {
        log.debug("QueryParser");
      }
      QueryParser qp = new QueryParser();
      qp.execute(this.updatedQuery); // validateOptions updates the updatedQuery
     
      // need to build the query tree based on jexl parsing.
      // Step 2: refactor QueryTree - inplace modification
      // Transform the parser's iterator tree into this object's internal tree representation.
      if (log.isDebugEnabled()) {
        log.debug("transformTreeNode");
      }
      TreeNode tree = qp.getIteratorTree();
      this.root = transformTreeNode(tree);
     
      // Step 3 (continues beyond this excerpt): refactor the transformed tree.
      if (log.isDebugEnabled()) {
        log.debug("refactorTree");
      }
View Full Code Here


    // Begin overall query timing; per-phase StopWatches below break down the cost.
    abstractQueryLogic.start();
   
    // Phase 1: parse the raw query string.
    StopWatch parseQuery = new StopWatch();
    parseQuery.start();
   
    QueryParser parser;
    try {
      if (log.isDebugEnabled()) {
        log.debug("ShardQueryLogic calling QueryParser.execute");
      }
      parser = new QueryParser();
      parser.execute(queryString);
    } catch (org.apache.commons.jexl2.parser.ParseException e1) {
      // Surface malformed queries to the caller as an argument error, preserving the cause.
      throw new IllegalArgumentException("Error parsing query", e1);
    }
    // Hash of the query, used as a correlation id in the debug log lines below.
    int hash = parser.getHashValue();
    parseQuery.stop();
    if (log.isDebugEnabled()) {
      log.debug(hash + " Query: " + queryString);
    }
   
    // Collect the distinct field names (identifiers) referenced by the query.
    Set<String> fields = new HashSet<String>();
    for (String f : parser.getQueryIdentifiers()) {
      fields.add(f);
    }
    if (log.isDebugEnabled()) {
      log.debug("getQueryIdentifiers: " + parser.getQueryIdentifiers().toString());
    }
    // Remove any negated fields from the fields list, we don't want to lookup negated fields
    // in the index.
    fields.removeAll(parser.getNegatedTermsForOptimizer());
   
    // NOTE(review): this debug line is a verbatim duplicate of the one above and logs the
    // unchanged identifier list again — presumably it was meant to log `fields` after the
    // negated terms were removed; confirm against the upstream source.
    if (log.isDebugEnabled()) {
      log.debug("getQueryIdentifiers: " + parser.getQueryIdentifiers().toString());
    }
    // Get the mapping of field name to QueryTerm object from the query. The query term object
    // contains the operator, whether its negated or not, and the literal to test against.
    Multimap<String,QueryTerm> terms = parser.getQueryTerms();
   
    // Find out which terms are indexed
    // TODO: Should we cache indexed terms or does that not make sense since we are always
    // loading data.
    // Phase 2: metadata lookup — which of the remaining fields are indexed, and with
    // which Normalizer classes.
    StopWatch queryMetadata = new StopWatch();
    queryMetadata.start();
    Map<String,Multimap<String,Class<? extends Normalizer>>> metadataResults;
    try {
      metadataResults = findIndexedTerms(connector, auths, fields, typeFilter);
    } catch (Exception e1) {
      throw new RuntimeException("Error in metadata lookup", e1);
    }
   
    // Create a map of indexed term to set of normalizers for it
    // (Normalizer instances are resolved through normalizerCacheMap rather than instantiated.)
    Multimap<String,Normalizer> indexedTerms = HashMultimap.create();
    for (Entry<String,Multimap<String,Class<? extends Normalizer>>> entry : metadataResults.entrySet()) {
      // Get the normalizer from the normalizer cache
      for (Class<? extends Normalizer> clazz : entry.getValue().values()) {
        indexedTerms.put(entry.getKey(), normalizerCacheMap.get(clazz));
      }
    }
    queryMetadata.stop();
    if (log.isDebugEnabled()) {
      log.debug(hash + " Indexed Terms: " + indexedTerms.toString());
    }
   
    // Terms that appear under an OR, tracked separately for the optimizer.
    Set<String> orTerms = parser.getOrTermsForOptimizer();
   
    // Iterate over the query terms to get the operators specified in the query.
    // (Loop body continues beyond this excerpt.)
    ArrayList<String> unevaluatedExpressions = new ArrayList<String>();
    boolean unsupportedOperatorSpecified = false;
    for (Entry<String,QueryTerm> entry : terms.entries()) {
View Full Code Here

TOP

Related Classes of org.apache.accumulo.examples.wikisearch.parser.QueryParser

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and owned by ORACLE Inc. Contact coftware#gmail.com.