Package org.apache.stanbol.entityhub.servicesapi.query

Examples of org.apache.stanbol.entityhub.servicesapi.query.FieldQuery
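
For orientation, the snippets below all follow the same basic pattern: obtain a FieldQueryFactory (DefaultQueryFactory or the factory of a Site, the Entityhub or a Yard), create a FieldQuery, select the fields to return, set one or more constraints, and optionally set a limit and offset before executing the query. The following minimal sketch illustrates that pattern; the rdfs:label field URI, the wildcard pattern and the "en" language tag are illustrative assumptions rather than values taken from any specific example, and execution is only indicated in comments because it depends on the backend being queried.

        // a minimal sketch of the common FieldQuery pattern (assumed field URI and values)
        FieldQueryFactory queryFactory = DefaultQueryFactory.getInstance();
        FieldQuery query = queryFactory.createFieldQuery();
        String labelField = "http://www.w3.org/2000/01/rdf-schema#label"; // rdfs:label (assumed)
        // fields to include in the returned Representations
        query.addSelectedField(labelField);
        // case-insensitive wildcard (prefix) search for English labels starting with "stanbol"
        query.setConstraint(labelField,
            new TextConstraint("stanbol*", PatternType.wildcard, false, "en"));
        query.setLimit(10);
        query.setOffset(0);
        // execution depends on the backend, for example:
        // QueryResultList<Representation> results = site.find(query);        // a referenced site
        // QueryResultList<Representation> results = yard.find(query);        // a Yard
        // QueryResultList<Entity> entities = entityhub.findEntities(query);  // the Entityhub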


        // if site is null use the query factory of the Entityhub, else the one of the referenced site
        FieldQueryFactory queryFactory = site == null ? entityhub.getQueryFactory() : site.getQueryFactory();

        log.trace("Will use a query-factory of type [{}].", queryFactory.getClass().toString());

        FieldQuery query = queryFactory.createFieldQuery();

        // replace spaces with plus to create an AND search for all words in the
        // name!
        Constraint labelConstraint;
        // TODO: make case sensitivity configurable
        boolean casesensitive = false;
        String namedEntityLabel = casesensitive ? namedEntity.getName() : namedEntity.getName().toLowerCase();
        if (language != null) {
            // search labels in the language and without language
            labelConstraint = new TextConstraint(namedEntityLabel, casesensitive, language, null);
        } else {
            labelConstraint = new TextConstraint(namedEntityLabel, casesensitive);
        }
        query.setConstraint(nameField, labelConstraint);
        if (OntologicalClasses.DBPEDIA_PERSON.equals(namedEntity.getType())) {
            if (personState) {
                if (personType != null) {
                    query.setConstraint(RDF_TYPE.getUnicodeString(), new ReferenceConstraint(personType));
                }
                // else no type constraint
            } else {
                // ignore people
                return Collections.emptyList();
            }
        } else if (DBPEDIA_ORGANISATION.equals(namedEntity.getType())) {
            if (orgState) {
                if (orgType != null) {
                    query.setConstraint(RDF_TYPE.getUnicodeString(), new ReferenceConstraint(orgType));
                }
                // else no type constraint
            } else {
                // ignore organisations
                return Collections.emptyList();
            }
        } else if (OntologicalClasses.DBPEDIA_PLACE.equals(namedEntity.getType())) {
            if (this.placeState) {
                if (this.placeType != null) {
                    query.setConstraint(RDF_TYPE.getUnicodeString(), new ReferenceConstraint(placeType));
                }
                // else no type constraint
            } else {
                // ignore places
                return Collections.emptyList();
            }
        }
        query.setLimit(Math.max(20, this.numSuggestions * 3));

        log.trace("A query has been created of type [{}] and the following settings:\n{}", query.getClass()
                .toString(), query.toString());

        if (null == site) log.trace("A query will be sent to the entity-hub of type [{}].", entityhub
                .getClass());
        else log.trace("A query will be sent to a site [id :: {}][type :: {}].", site.getId(), site
                .getClass());
View Full Code Here


        //build the query and then return the result
        Site site = getSearchService();
        if(site == null){
            throw new IllegalStateException("ReferencedSite "+siteId+" is currently not available");
        }
        FieldQuery query = EntitySearcherUtils.createFieldQuery(site.getQueryFactory(),
            field, includeFields, search, languages);
        if(limit != null){
            query.setLimit(limit);
        }
        QueryResultList<Representation> results;
        try {
            results = site.find(query);
        } catch (SiteException e) {
View Full Code Here

    public static void main(String[] args) {
        SolrQueryFactory factory = new SolrQueryFactory(
            InMemoryValueFactory.getInstance(),
            IndexValueFactory.getInstance(),
            new SolrFieldMapper(null));
        FieldQuery query = DefaultQueryFactory.getInstance().createFieldQuery();
//        query.setConstraint("urn:field2", new TextConstraint("test","en","de"));
        query.setConstraint("urn:field3", new TextConstraint(Arrays.asList(
            "text value","anothertest","some more values"),"en","de",null));
        query.addSelectedField("urn:field2a");
        query.addSelectedField("urn:field3");
        query.setLimit(5);
        query.setOffset(5);
        SolrQuery solrQuery = factory.parseFieldQuery(query, SELECT.QUERY);
        System.out.println(solrQuery.getQuery());
    }
View Full Code Here

                                         Span[] tokenSpans,
                                         int offset,
                                         int startIndex,
                                         int endIndex) throws EngineException {
        List<Representation> processedResults;
        FieldQuery query = site != null ?
                site.getQueryFactory().createFieldQuery() :
                    entityhub.getQueryFactory().createFieldQuery();
        query.addSelectedField(nameField);
        query.addSelectedField(NamespaceEnum.rdfs+"comment");
        query.addSelectedField(NamespaceEnum.rdf+"type");
        query.addSelectedField(NamespaceEnum.rdfs+"seeAlso");
        query.setConstraint(nameField, new TextConstraint(searchString));//,language));
        //select 5 times the number of suggestions to allow some post filtering
        //TODO: convert this to additional queries with offset
        query.setLimit(Integer.valueOf(maxSuggestions*5));
        QueryResultList<Representation> result;
        try {
            result = site != null ? site.find(query): entityhub.find(query);
        } catch (EntityhubException e) {
            throw new EngineException(this,ci,String.format(
                "Unable to search for Entity wiht label '%s@%s'",
                searchString,language),e);
        }
        if(!result.isEmpty()){
            processedResults = new ArrayList<Representation>(maxSuggestions);
            for(Iterator<Representation> it = result.iterator();it.hasNext() && processedResults.size()<maxSuggestions;){
                Representation rep = it.next();
                if(checkLabels(rep.getText(nameField),language,searchTokens)){
                    //based on the configuration we might need to process
                    //redirects (rdfs:seeAlso links)
                    rep = processRedirects(site, rep, query.getSelectedFields());
                    processedResults.add(rep);
                } //else ignore this result
            }
        } else {
            processedResults = Collections.emptyList();
View Full Code Here

    private ReferencedSiteManager referencedSiteManager;

    @Override
    public Map<String,List<RelatedKeyword>> search(String keyword) throws SearchException {
        Map<String,List<RelatedKeyword>> results = new HashMap<String,List<RelatedKeyword>>();
        FieldQuery fieldQuery = getFieldQuery(keyword);
        QueryResultList<Representation> externalEntities = referencedSiteManager.find(fieldQuery);
        String entityId = null;
        if (externalEntities != null && externalEntities.size() > 0) {
            entityId = externalEntities.iterator().next().getId();
            try {
View Full Code Here

        return search(keyword);
    }

    private FieldQuery getFieldQuery(String keyword) {
        FieldQueryFactory qf = DefaultQueryFactory.getInstance();
        FieldQuery fieldQuery = qf.createFieldQuery();
        Collection<String> selectedFields = new ArrayList<String>();
        selectedFields.add(RDFS.label.getUnicodeString());
        fieldQuery.addSelectedFields(selectedFields);
        fieldQuery.setConstraint(RDFS.label.getUnicodeString(), new TextConstraint(keyword,
                PatternType.wildcard, false, "en"));
        fieldQuery.setLimit(1);
        fieldQuery.setOffset(0);
        return fieldQuery;
    }
View Full Code Here

        test1.add(field, "This is the text content of a field with value1.");
        test2.add(field, "This is the text content of a field with value2.");
        Iterable<Representation> updatedIterable = yard.update(Arrays.asList(test1, test2));
        assertNotNull(updatedIterable);

        FieldQuery query = yard.getQueryFactory().createFieldQuery();
        query.setConstraint(field, new TextConstraint(Arrays.asList("text content")));
        QueryResultList<Representation> results = yard.find(query);
        assertEquals(2, results.size());

        // fetch the light / minimal representation
        query = yard.getQueryFactory().createFieldQuery();
        query.setConstraint(field, new TextConstraint(Arrays.asList("value2")));
        results = yard.find(query);
        assertEquals(1, results.size());
        Representation result = results.iterator().next();
        assertEquals("urn:yard.test.testFieldQuery:representation.id2", result.getId());
        assertEquals(null, result.getFirst(field));
View Full Code Here

        Iterable<Representation> updatedIterable = yard.update(Arrays.asList(test1, test2, test3));
        assertNotNull(updatedIterable);

        // Perform a first similarity query that looks a lot like the first document
        FieldQuery query = yard.getQueryFactory().createFieldQuery();
        query.setConstraint(similarityfield, new SimilarityConstraint("aaaa aaaa aaaa aaaa zzzz yyyy"));
        QueryResultList<Representation> results = yard.find(query);
        assertEquals(2, results.size());
        Iterator<Representation> it = results.iterator();
        Representation first = it.next();
        assertEquals("urn:yard.test.testFieldQueryWithSimilarityConstraint:representation.id1", first.getId());
        // assertEquals(0.99, first.getFirst("http://www.iks-project.eu/ontology/rick/query/score"));

        Representation second = it.next();
        assertEquals("urn:yard.test.testFieldQueryWithSimilarityConstraint:representation.id2",
            second.getId());
        // assertEquals(0.80, second.getFirst("http://www.iks-project.eu/ontology/rick/query/score"));

        // combine similarity with traditional filtering
        query = yard.getQueryFactory().createFieldQuery();
        query.setConstraint(similarityfield, new SimilarityConstraint("aaaa aaaa aaaa aaaa zzzz yyyy"));
        query.setConstraint(filterfield, new TextConstraint(Arrays.asList("other")));
        results = yard.find(query);
        assertEquals(1, results.size());
        it = results.iterator();
        first = it.next();
        assertEquals("urn:yard.test.testFieldQueryWithSimilarityConstraint:representation.id2", first.getId());
View Full Code Here

            List<UriRef> subsumedAnnotations) throws EntityhubException {
        // First get the required properties for the parsed textAnnotation
        // ... and check the values

        log.debug("Process {}", namedEntity);
        FieldQuery query = site == null ? //if site is NULL use the Entityhub
                entityhub.getQueryFactory().createFieldQuery() :
                    site.getQueryFactory().createFieldQuery();
        // replace spaces with plus to create an AND search for all words in the name!
        query.setConstraint(nameField, new TextConstraint(namedEntity.getName()));// name.replace(' ', '+')));
        if (OntologicalClasses.DBPEDIA_PERSON.equals(namedEntity.getType())) {
            if (personState) {
                if (personType != null) {
                    query.setConstraint(RDF_TYPE.getUnicodeString(), new ReferenceConstraint(personType));
                }
                // else no type constraint
            } else {
                // ignore people
                return Collections.emptyList();
            }
        } else if (DBPEDIA_ORGANISATION.equals(namedEntity.getType())) {
            if (orgState) {
                if (orgType != null) {
                    query.setConstraint(RDF_TYPE.getUnicodeString(), new ReferenceConstraint(orgType));
                }
                // else no type constraint
            } else {
                // ignore organisations
                return Collections.emptyList();
            }
        } else if (OntologicalClasses.DBPEDIA_PLACE.equals(namedEntity.getType())) {
            if (this.placeState) {
                if (this.placeType != null) {
                    query.setConstraint(RDF_TYPE.getUnicodeString(), new ReferenceConstraint(placeType));
                }
                // else no type constraint
            } else {
                // ignore places
                return Collections.emptyList();
            }
        }
        query.setLimit(Math.max(20,this.numSuggestions*3));
        QueryResultList<Entity> results = site == null? //if site is NULL
                entityhub.findEntities(query) : //use the Entityhub
                    site.findEntities(query); //else the referenced site
        log.debug("{} results returned by query {}", results.size(), query);

View Full Code Here

        }
        pattern = pattern.trim();
        pattern += "*";

        FieldQueryFactory qf = DefaultQueryFactory.getInstance();
        FieldQuery fieldQuery = qf.createFieldQuery();
        Collection<String> selectedFields = new ArrayList<String>();
        selectedFields.add(DEFAULT_AUTOCOMPLETE_SEARCH_FIELD);
        fieldQuery.addSelectedFields(selectedFields);
        fieldQuery.setConstraint(DEFAULT_AUTOCOMPLETE_SEARCH_FIELD, new TextConstraint(pattern,
                PatternType.wildcard, false, "en"));
        fieldQuery.setLimit(AUTOCOMPLETED_KEYWORD_NUMBER);
        fieldQuery.setOffset(0);

        List<String> result = new ArrayList<String>();
        QueryResultList<Representation> entityhubResult = referencedSiteManager.find(fieldQuery);
        for (Representation rep : entityhubResult) {
            result.add(rep.getFirst(DEFAULT_AUTOCOMPLETE_SEARCH_FIELD).toString());
View Full Code Here
