Package org.apache.stanbol.commons.indexedgraph

Examples of org.apache.stanbol.commons.indexedgraph.IndexedMGraph
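
All of the snippets below build on the same small Clerezza API surface. As a quick orientation, here is a minimal, self-contained sketch of that pattern; the class name, URIs and label are made up for illustration, and the Clerezza 0.x package names are assumed from the imports commonly used together with IndexedMGraph.

    import java.util.Iterator;

    import org.apache.clerezza.rdf.core.Graph;
    import org.apache.clerezza.rdf.core.MGraph;
    import org.apache.clerezza.rdf.core.Triple;
    import org.apache.clerezza.rdf.core.UriRef;
    import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
    import org.apache.clerezza.rdf.core.impl.TripleImpl;
    import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;

    public class IndexedMGraphExample {

        public static void main(String[] args) {
            // in-memory MGraph implementation that indexes triples for fast filter() lookups
            MGraph graph = new IndexedMGraph();
            UriRef entity = new UriRef("urn:example:entity"); // hypothetical URI
            UriRef label = new UriRef("http://www.w3.org/2000/01/rdf-schema#label");
            graph.add(new TripleImpl(entity, label, new PlainLiteralImpl("Example Entity")));
            // filter(subject, predicate, object) with null wildcards performs an indexed lookup
            Iterator<Triple> it = graph.filter(entity, null, null);
            while (it.hasNext()) {
                System.out.println(it.next());
            }
            // getGraph() returns an immutable snapshot of the current state
            Graph snapshot = graph.getGraph();
            System.out.println(snapshot.size() + " triple(s)");
        }
    }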


     *            {@link String}
     * @return the {@link MGraph} with the signature of the entity
     */
    private MGraph populateWithEntity(String entityURI, MGraph target) {
        log.debug("Requesting signature of entity {}", entityURI);
        MGraph graph = target != null ? target : new IndexedMGraph();
        // Query the Entity Hub
        Entity signature = referencedSiteManager.getEntity(entityURI);
        if (signature != null) {
            RdfRepresentation rdfSignature = RdfValueFactory.getInstance().toRdfRepresentation(
                signature.getRepresentation());


   
    @BeforeClass
    public static void init() throws IOException, ConfigurationException {
        InputStream in = TextAnnotationNewModelEngineTest.class.getClassLoader().getResourceAsStream(TEST_ENHANCEMENTS);
        Assert.assertNotNull("Unable to load resource '"+TEST_ENHANCEMENTS+"' via Classpath",in);
        origEnhancements = new IndexedMGraph();
        rdfParser.parse(origEnhancements, in, SupportedFormat.RDF_XML, null);
        Assert.assertFalse(origEnhancements.isEmpty());
        //parse the ID of the ContentItem from the enhancements
        Iterator<Triple> it = origEnhancements.filter(null, Properties.ENHANCER_EXTRACTED_FROM, null);
        Assert.assertTrue(it.hasNext());

    }
   
    @Before
    public void initTest() throws IOException {
        contentItem = ciFactory.createContentItem(ciUri,
            new StringSource(SINGLE_SENTENCE), new IndexedMGraph(origEnhancements));
    }
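The copy constructor used in initTest() gives every test its own mutable copy of the shared origEnhancements graph, so changes made while processing one ContentItem do not leak into the next test. Below is a minimal sketch of that idiom (reusing the imports from the first sketch), with hypothetical triples and assuming, as the test setup implies, that the constructor copies the triples rather than wrapping the live source graph.

    MGraph shared = new IndexedMGraph();
    shared.add(new TripleImpl(new UriRef("urn:example:s"),
        new UriRef("urn:example:p"), new UriRef("urn:example:o")));

    // independent, mutable copy of the current contents of 'shared'
    MGraph perTestCopy = new IndexedMGraph(shared);
    perTestCopy.add(new TripleImpl(new UriRef("urn:example:s2"),
        new UriRef("urn:example:p"), new UriRef("urn:example:o2")));

    assert shared.size() == 1;      // the shared graph is untouched
    assert perTestCopy.size() == 2; // only the copy sees the new triple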

                OWLOntology o = OWLAPIToClerezzaConverter.clerezzaGraphToOWLOntology(graph, mgr);
                return o;
            }

            // FIXME when there's more than one ontology, this way of merging them seems inefficient...
            TripleCollection tempGraph = new IndexedMGraph();
            // The set of triples that will be excluded from the merge
            Set<Triple> exclusions = new HashSet<Triple>();
            // Examine all reverse imports
            for (OWLOntologyID ref : revImps)
                if (!loaded.contains(ref)) {
                    // Get the triples
                    TripleCollection imported =
                    // store.getTriples(ref);
                    getStoredOntology(getKey(ref), MGraph.class, false);
                    // For each owl:Ontology
                    Iterator<Triple> remove = imported.filter(null, RDF.type, OWL.Ontology);
                    while (remove.hasNext()) {
                        NonLiteral subj = remove.next().getSubject();
                        /*
                         * If it's not the root ontology, trash all its triples. If the root ontology is
                         * anonymous, all ontology annotations are to be trashed without distinction.
                         */
                        if (ontologyId == null || !subj.equals(ontologyId)) {
                            Iterator<Triple> it = imported.filter(subj, null, null);
                            while (it.hasNext()) {
                                Triple t = it.next();
                                exclusions.add(t);
                            }
                        }
                    }

                    Iterator<Triple> it = imported.iterator();
                    while (it.hasNext()) {
                        Triple t = it.next();
                        if (!exclusions.contains(t)) tempGraph.add(t);
                    }

                    loaded.add(ref);
                }
            // Since they are all merged and import statements removed, there should be no risk of going

         * TODO manage import rewrites better once the container ID is fully configurable (i.e. instead of
         * going upOne() add "session" or "ontology" if needed). But only do this if we keep considering
         * imported ontologies as *not* managed.
         */
        // if (!merge) { // TODO
        MGraph o = new IndexedMGraph(ontologyProvider.getStoredOntology(ontologyId, MGraph.class, merge));

        // Now rewrite import statements

        // Scan import statements for each owl:Ontology instance (hopefully one).
        String tid = getID();
        // Bit of a hack: since ontology spaces are named like {scopeid}/{core|custom}, in that particular
        // case we go back to {scopeid}, whereas for sessions we maintain their original id.
        if (backwardPathLength > 0) tid = tid.split("/")[0];

        Iterator<Triple> it;
        List<Triple> newImports = new LinkedList<Triple>();
        synchronized (o) {
            it = o.filter(null, OWL.imports, null);
            // We use this list to avoid concurrent modification exceptions.
            List<Triple> replaceUs = new LinkedList<Triple>();
            while (it.hasNext())
                replaceUs.add(it.next());

            for (Triple t : replaceUs) {
                String s = ((UriRef) (t.getObject())).getUnicodeString();
                // FIXME note the different import targets in the OWLOntology and TripleCollection objects!
                // s = s.substring(s.indexOf("::") + 2, s.length());
                boolean managed = managedOntologies.contains(IRI.create(s));
                UriRef target = new UriRef((managed ? universalPrefix + "/" + tid + "/"
                        : URIUtils.upOne(universalPrefix) + "/")
                                           + s);
                o.remove(t);
                newImports.add(new TripleImpl(t.getSubject(), OWL.imports, target));
            }
        }

        for (Triple t : newImports)
            o.add(t);

        // } // TODO else if (merge)

        return o;
    }

        final String resourcePath = uriInfo.getAbsolutePath().toString();
        //The URI at which this service was accessed; this will be the
        //central serviceUri in the response
        final UriRef serviceUri = new UriRef(resourcePath);
        //the in memory graph to which the triples for the response are added
        final MGraph responseGraph = new IndexedMGraph();
        //This GraphNode represents the service within our result graph
        final GraphNode node = new GraphNode(serviceUri, responseGraph);
        //The triples will be added to the first graph of the union
        //i.e. to the in-memory responseGraph
        node.addProperty(RDF.type, Ontology.MultiEnhancer);

        if(engine == null){
            throw new IllegalArgumentException("The parsed EnhancementEngine MUST NOT be NULL!");
        }
        this.engine = engine;
        this.name = engine.getName()+"Chain";
        MGraph graph = new IndexedMGraph();
        writeExecutionNode(graph, createExecutionPlan(graph, name),
            engine.getName(), false, null);
        executionPlan = graph.getGraph();
    }

     * @param headers the http headers of the request
     * @return the response
     */
    private Response executeLDPathQuery(SiteManager manager,FieldQuery query, String ldpathProgramString, MediaType mediaType, HttpHeaders headers) {
        QueryResultList<Representation> result;
        ValueFactory vf = new RdfValueFactory(new IndexedMGraph());
        SiteManagerBackend backend = new SiteManagerBackend(manager);
        EntityhubLDPath ldPath = new EntityhubLDPath(backend,vf);
        //copy the selected fields, because we might need to delete some during
        //the preparation phase
        Set<String> selectedFields = new HashSet<String>(query.getSelectedFields());

    @BeforeClass
    public static void setUpServices() throws IOException {
        searcher = new TestSearcherImpl(TEST_REFERENCED_SITE_NAME,NAME,new SimpleLabelTokenizer());
        //add some terms to the searcher
        MGraph graph = new IndexedMGraph();
        UriRef uri = new UriRef("urn:test:PatrickMarshall");
        graph.add(new TripleImpl(uri, NAME, new PlainLiteralImpl("Patrick Marshall")));
        graph.add(new TripleImpl(uri, TYPE, OntologicalClasses.DBPEDIA_PERSON));
        searcher.addEntity(new Entity(uri, graph));
       
        uri = new UriRef("urn:test:Geologist");
        graph.add(new TripleImpl(uri, NAME, new PlainLiteralImpl("Geologist")));
        graph.add(new TripleImpl(uri, TYPE, new UriRef(NamespaceEnum.skos+"Concept")));
        graph.add(new TripleImpl(uri, REDIRECT, new UriRef("urn:test:redirect:Geologist")));
        searcher.addEntity(new Entity(uri, graph));
        //a redirect
        uri = new UriRef("urn:test:redirect:Geologist");
        graph.add(new TripleImpl(uri, NAME, new PlainLiteralImpl("Geologe (redirect)")));
        graph.add(new TripleImpl(uri, TYPE, new UriRef(NamespaceEnum.skos+"Concept")));
        searcher.addEntity(new Entity(uri, graph));

        uri = new UriRef("urn:test:NewZealand");
        graph.add(new TripleImpl(uri, NAME, new PlainLiteralImpl("New Zealand")));
        graph.add(new TripleImpl(uri, TYPE, OntologicalClasses.DBPEDIA_PLACE));
        searcher.addEntity(new Entity(uri, graph));

        uri = new UriRef("urn:test:UniversityOfOtago");
        graph.add(new TripleImpl(uri, NAME, new PlainLiteralImpl("University of Otago")));
        graph.add(new TripleImpl(uri, TYPE, OntologicalClasses.DBPEDIA_ORGANISATION));
        searcher.addEntity(new Entity(uri, graph));
       
        uri = new UriRef("urn:test:University");
        graph.add(new TripleImpl(uri, NAME, new PlainLiteralImpl("University")));
        graph.add(new TripleImpl(uri, TYPE, new UriRef(NamespaceEnum.skos+"Concept")));
        searcher.addEntity(new Entity(uri, graph));

        uri = new UriRef("urn:test:Otago");
        graph.add(new TripleImpl(uri, NAME, new PlainLiteralImpl("Otago")));
        graph.add(new TripleImpl(uri, TYPE, OntologicalClasses.DBPEDIA_PLACE));
        searcher.addEntity(new Entity(uri, graph));
        //add a 2nd Otago (Place and University)
        uri = new UriRef("urn:test:Otago_Texas");
        graph.add(new TripleImpl(uri, NAME, new PlainLiteralImpl("Otago (Texas)")));
        graph.add(new TripleImpl(uri, NAME, new PlainLiteralImpl("Otago")));
        graph.add(new TripleImpl(uri, TYPE, OntologicalClasses.DBPEDIA_PLACE));
        searcher.addEntity(new Entity(uri, graph));

        uri = new UriRef("urn:test:UniversityOfOtago_Texas");
        graph.add(new TripleImpl(uri, NAME, new PlainLiteralImpl("University of Otago (Texas)")));
        graph.add(new TripleImpl(uri, TYPE, OntologicalClasses.DBPEDIA_ORGANISATION));
        searcher.addEntity(new Entity(uri, graph));
       
        TEST_ANALYSED_TEXT = AnalysedTextFactory.getDefaultInstance().createAnalysedText(
            ciFactory.createBlob(new StringSource(TEST_TEXT)));
        TEST_ANALYSED_TEXT_WO = AnalysedTextFactory.getDefaultInstance().createAnalysedText(

    public static final float PERCENTAGE_LINKED = 0.3f;
    public static final float PERCENTAGE_PRESENT = 0.9f;
   
    @BeforeClass
    public static void setUpServices() throws IOException {
        testData = new IndexedMGraph();
        long seed = System.currentTimeMillis();
        log.info("Test seed "+ seed);
        Random random = new Random(seed);
        int numEntities = 0;
        for(int i = 0; i < NUM_ENTITIES ; i++){
            if(random.nextFloat() <= PERCENTAGE_PRESENT){ //do not create all entities
                UriRef uri = new UriRef("urn:test:entity"+i);
                testData.add(new TripleImpl(uri, RDF_TYPE, SKOS_CONCEPT));
                testData.add(new TripleImpl(uri, RDFS_LABEL,
                    new PlainLiteralImpl("entity "+i, LANG_EN)));
                testData.add(new TripleImpl(uri, RDFS_LABEL,
                    new PlainLiteralImpl("Entity "+i, LANG_DE)));
                testData.add(new TripleImpl(uri, SKOS_NOTATION,
                    lf.createTypedLiteral(i)));
                numEntities++;
            }
        }
        log.info(" ... created {} Entities",numEntities);
        testMetadata = new IndexedMGraph();
        int numLinks = 0;
        for(int i = 0; i < NUM_ENTITIES ; i++){
            if(random.nextFloat() < PERCENTAGE_LINKED){
                UriRef enhancementUri = new UriRef("urn:test:enhancement"+i);
                UriRef entityUri = new UriRef("urn:test:entity"+i);
