Package: org.apache.clerezza.rdf.core

Examples of org.apache.clerezza.rdf.core.Graph.filter()


   * add the language to Clerezza via the addLanguage() method in this class.
   */
  public Set<UriRef> getAllLanguages() {
    Set<UriRef> result = new HashSet<UriRef>();
    Graph lingvojGraph = getLingvojGraph();
    Iterator<Triple> languages = lingvojGraph.filter(null, RDFS.isDefinedBy,
        null);
    while (languages.hasNext()) {
      UriRef languageUri = (UriRef) languages.next().getSubject();
      result.add(languageUri);
    }
View Full Code Here


   * add the language to Clerezza via the addLanguage() method in this class.
   * @return the URI of the matching language
   */
  public UriRef getLanguage(String languageName, Language inLanguage) {
    Graph lingvojGraph = getLingvojGraph();
    Iterator<Triple> languages = lingvojGraph.filter(null, RDFS.isDefinedBy, null);
    while (languages.hasNext()) {
      GraphNode languageNode = new GraphNode((UriRef) languages.next().getSubject(), lingvojGraph);
      Iterator<Resource> labels = languageNode.getObjects(RDFS.label);
      while (labels.hasNext()) {
        PlainLiteral label = (PlainLiteral) labels.next();
View Full Code Here

   */
  private void retrieveConcepts(ConceptProvider conceptProvider,
      boolean first, GraphNode resultNode, String searchTerm) {
    MGraph resultMGraph = (MGraph) resultNode.getGraph();
    Graph graph = conceptProvider.retrieveConcepts(searchTerm);
    Iterator<Triple> concepts = graph.filter(null, RDF.type, SKOS.Concept);
    if (first) {
      while (concepts.hasNext()) {
        resultNode.addProperty(QUERYRESULT.concept, concepts.next()
            .getSubject());
      }
View Full Code Here

                Iterator<Triple> it;
                Set<NonLiteral> ontologies = new HashSet<NonLiteral>();
                Set<Resource> importTargets = new HashSet<Resource>();
                custom = this.getCustomSpace().export(Graph.class, merge);
                // root.addAll(space);
                it = custom.filter(null, RDF.type, OWL.Ontology);
                while (it.hasNext())
                    ontologies.add(it.next().getSubject());
                it = custom.filter(null, OWL.imports, null);
                while (it.hasNext())
                    importTargets.add(it.next().getObject());
View Full Code Here

                custom = this.getCustomSpace().export(Graph.class, merge);
                // root.addAll(space);
                it = custom.filter(null, RDF.type, OWL.Ontology);
                while (it.hasNext())
                    ontologies.add(it.next().getSubject());
                it = custom.filter(null, OWL.imports, null);
                while (it.hasNext())
                    importTargets.add(it.next().getObject());
                core = this.getCoreSpace().export(Graph.class, merge);
                // root.addAll(space);
                it = core.filter(null, RDF.type, OWL.Ontology);
View Full Code Here

                Set<Resource> importTargets = new HashSet<Resource>();
                for (OWLOntologyID ontologyId : managedOntologies) {
                    Graph g = getOntology(ontologyId, Graph.class, false);
                    root.addAll(g);

                    it = g.filter(null, OWL.imports, null);
                    while (it.hasNext()) {
                        IRI tgt;
                        Resource r = it.next().getObject();
                        try {
                            if (r instanceof UriRef) tgt = IRI.create(((UriRef) r).getUnicodeString());
View Full Code Here

                            continue;
                        }

                    }

                    it = g.filter(null, RDF.type, OWL.Ontology);
                    while (it.hasNext()) {
                        NonLiteral ontology = it.next().getSubject();
                        log.debug("Removing all triples related to {} from {}", ontology, iri);
                        Iterator<Triple> it2 = g.filter(ontology, null, null);
                        while (it2.hasNext())
View Full Code Here

                    it = g.filter(null, RDF.type, OWL.Ontology);
                    while (it.hasNext()) {
                        NonLiteral ontology = it.next().getSubject();
                        log.debug("Removing all triples related to {} from {}", ontology, iri);
                        Iterator<Triple> it2 = g.filter(ontology, null, null);
                        while (it2.hasNext())
                            root.remove(it2.next());
                    }

                    /*
 
View Full Code Here

        //NOTE: Jena TDB does not support getSize for the union graph
//        int expectedTripleCount = persons.size()+orgdata.size();
//        Assert.assertEquals("Uniongraph has "+data.size()
//            +" triples (expected "+expectedTripleCount+")",
//            expectedTripleCount, data.size());
        Iterator<Triple> it = data.filter(null, type, companyType);
        Set<UriRef> expected = new HashSet<UriRef>(Arrays.asList(talinor,kondalor));
        while(it.hasNext()){
            NonLiteral subject = it.next().getSubject();
            Assert.assertTrue("Unexpected "+subject, expected.remove(subject));
        }
View Full Code Here

            NonLiteral subject = it.next().getSubject();
            Assert.assertTrue("Unexpected "+subject, expected.remove(subject));
        }
        Assert.assertTrue("Missing "+expected, expected.isEmpty());

        it = data.filter(null, type, personType);
        expected = new HashSet<UriRef>(Arrays.asList(john,tim));
        while(it.hasNext()){
            NonLiteral subject = it.next().getSubject();
            Assert.assertTrue("Unexpected "+subject, expected.remove(subject));
        }
View Full Code Here

TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.