Package org.apache.clerezza.rdf.core

Examples of org.apache.clerezza.rdf.core.LiteralFactory
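
LiteralFactory converts between Java objects and RDF typed literals: createTypedLiteral(Object) builds a TypedLiteral whose XSD datatype is derived from the Java type, and createObject(Class, TypedLiteral) converts a typed literal back into a Java object. Below is a minimal, self-contained sketch of that round trip. It assumes the pre-Commons-RDF Clerezza API (SimpleMGraph and TripleImpl from org.apache.clerezza.rdf.core.impl); the class name and the example URIs are made up for illustration, and the exact datatype URIs depend on the registered converters.

import java.util.Date;

import org.apache.clerezza.rdf.core.LiteralFactory;
import org.apache.clerezza.rdf.core.MGraph;
import org.apache.clerezza.rdf.core.TypedLiteral;
import org.apache.clerezza.rdf.core.UriRef;
import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
import org.apache.clerezza.rdf.core.impl.TripleImpl;

public class LiteralFactoryExample {

    public static void main(String[] args) {
        LiteralFactory lf = LiteralFactory.getInstance();

        // Java object -> typed literal; the XSD datatype (e.g. xsd:int,
        // xsd:double, xsd:dateTime) is derived from the Java type
        TypedLiteral count = lf.createTypedLiteral(42);
        TypedLiteral score = lf.createTypedLiteral(0.97d);
        TypedLiteral date  = lf.createTypedLiteral(new Date(0));

        // typed literals are used as triple objects like any other Resource
        MGraph graph = new SimpleMGraph();
        UriRef subject  = new UriRef("http://example.org/thing");       // hypothetical URIs,
        UriRef property = new UriRef("http://example.org/property/p1"); // for illustration only
        graph.add(new TripleImpl(subject, property, count));
        graph.add(new TripleImpl(subject, property, score));
        graph.add(new TripleImpl(subject, property, date));

        // typed literal -> Java object
        Integer back = lf.createObject(Integer.class, count);
        System.out.println(back + " / " + date.getDataType());
    }
}

The excerpts below show the same factory in use in Clerezza and Apache Stanbol code: ontology manager lookups, enhancement engines and storage tests.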



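        // Rebuild an OWLOntologyID from the graph: the stored ontology IRI and the
        // optional version IRI may be UriRef or TypedLiteral objects; in the latter
        // case LiteralFactory.createObject() converts them back to UriRef.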
        OWLOntologyID buildPublicKey(final UriRef resource) {
            // TODO desanitize?
            LiteralFactory lf = LiteralFactory.getInstance();
            IRI oiri = null, viri = null;
            Iterator<Triple> it = graph.filter(resource, HAS_ONTOLOGY_IRI_URIREF, null);
            if (it.hasNext()) {
                UriRef s = null;
                Resource obj = it.next().getObject();
                if (obj instanceof UriRef) s = ((UriRef) obj);
                else if (obj instanceof TypedLiteral) s = lf.createObject(UriRef.class, (TypedLiteral) obj);
                oiri = IRI.create(s.getUnicodeString());
            } else {
                // Anonymous ontology? Decode the resource itself (which is not null)
                return OntologyUtils.decode(resource.getUnicodeString());
            }
            it = graph.filter(resource, HAS_VERSION_IRI_URIREF, null);
            if (it.hasNext()) {
                UriRef s = null;
                Resource obj = it.next().getObject();
                if (obj instanceof UriRef) s = ((UriRef) obj);
                else if (obj instanceof TypedLiteral) s = lf.createObject(UriRef.class, (TypedLiteral) obj);
                viri = IRI.create(s.getUnicodeString());
            }
            if (viri == null) return new OWLOntologyID(oiri);
            else return new OWLOntologyID(oiri, viri);
        }


                    "Cannot build a UriRef resource on an anonymous public key!");

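            // Look up the meta graph entry for a public key: the ontology IRI and
            // version IRI are wrapped as typed literals so they can be used as
            // objects in graph filters.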
            log.debug("Searching for a meta graph entry for public key:");
            log.debug(" -- {}", publicKey);
            UriRef match = null;
            LiteralFactory lf = LiteralFactory.getInstance();
            TypedLiteral oiri = lf.createTypedLiteral(new UriRef(ontologyIri.toString()));
            TypedLiteral viri = versionIri == null ? null : lf.createTypedLiteral(new UriRef(versionIri
                    .toString()));
            for (Iterator<Triple> it = meta.filter(null, HAS_ONTOLOGY_IRI_URIREF, oiri); it.hasNext();) {
                Resource subj = it.next().getSubject();
                log.debug(" -- Ontology IRI match found. Scanning");
                log.debug(" -- Resource : {}", subj);

        }

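        // Collect all public keys (versions) registered for an ontology IRI; the
        // IRI is wrapped in a typed literal to match the stored triples.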
        Set<OWLOntologyID> getVersions(IRI ontologyIri) {
            if (ontologyIri == null) throw new IllegalArgumentException("Cannot get versions for a null IRI.");
            Set<OWLOntologyID> keys = new HashSet<OWLOntologyID>();
            LiteralFactory lf = LiteralFactory.getInstance();
            TypedLiteral iri = lf.createTypedLiteral(new UriRef(ontologyIri.toString()));
            // Exclude aliases.
            for (Iterator<Triple> it = graph.filter(null, HAS_ONTOLOGY_IRI_URIREF, iri); it.hasNext();) {
                Resource sub = it.next().getSubject();
                if (sub instanceof UriRef) keys.add(buildPublicKey((UriRef) sub));
            }

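    // Special double values (NaN, positive and negative infinity) are stored as
    // typed literals and read back through a Representation created by RdfValueFactory.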
    @Test
    public void testDouble(){
        MGraph graph = new IndexedMGraph();
        UriRef id = new UriRef("http://www.example.org/test");
        UriRef doubleTestField = new UriRef("http://www.example.org/field/double");
        LiteralFactory lf = LiteralFactory.getInstance();
        graph.add(new TripleImpl(id, doubleTestField, lf.createTypedLiteral(Double.NaN)));
        graph.add(new TripleImpl(id, doubleTestField, lf.createTypedLiteral(Double.POSITIVE_INFINITY)));
        graph.add(new TripleImpl(id, doubleTestField, lf.createTypedLiteral(Double.NEGATIVE_INFINITY)));
       
        RdfValueFactory vf = new RdfValueFactory(graph);
        Representation r = vf.createRepresentation(id.getUnicodeString());
        Set<Double> expected = new HashSet<Double>(Arrays.asList(
            Double.NaN, Double.POSITIVE_INFINITY,Double.NEGATIVE_INFINITY));

            new PlainLiteralImpl("ja")));
    }
   
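    // Engine test: the expected dc:creator of the created enhancements is the
    // engine's class name, created as a typed literal.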
    @Test
    public void testEngine() throws EngineException {
        LiteralFactory lf = LiteralFactory.getInstance();
        Assert.assertEquals(EnhancementEngine.ENHANCE_ASYNC, engine.canEnhance(contentItem));
        engine.computeEnhancements(contentItem);
        //assert the results
        Map<UriRef,Resource> expected = new HashMap<UriRef,Resource>();
        expected.put(Properties.DC_CREATOR, lf.createTypedLiteral(engine.getClass().getName()));
        expected.put(Properties.ENHANCER_EXTRACTED_FROM,contentItem.getUri());
        Assert.assertEquals(16, EnhancementStructureHelper.validateAllTextAnnotations(
            contentItem.getMetadata(), text, expected));
        AnalysedText at = AnalysedTextUtils.getAnalysedText(contentItem);
        Assert.assertNotNull(at);

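        // Write TextAnnotations for named entities: start/end offsets and the
        // confidence value are added as typed literals, the selected text and
        // selection context as language-tagged plain literals.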
        if(writeTextAnnotations){
            Iterator<Span> spans = at.getEnclosed(EnumSet.of(SpanTypeEnum.Sentence,SpanTypeEnum.Chunk));
            Sentence context = null;
            MGraph metadata = ci.getMetadata();
            Language lang = new Language(language);
            LiteralFactory lf = LiteralFactory.getInstance();
            ci.getLock().writeLock().lock();
            try { //write TextAnnotations for Named Entities
                while(spans.hasNext()){
                    Span span = spans.next();
                    switch (span.getType()) {
                        case Sentence:
                            context = (Sentence)span; // remember the current sentence as selection context for following chunks
                            break;
                        default:
                            Value<NerTag> nerAnno = span.getAnnotation(NER_ANNOTATION);
                            if(nerAnno != null){
                                UriRef ta = EnhancementEngineHelper.createTextEnhancement(ci, this);
                                //add span related data
                                metadata.add(new TripleImpl(ta, ENHANCER_SELECTED_TEXT,
                                    new PlainLiteralImpl(span.getSpan(), lang)));
                                metadata.add(new TripleImpl(ta, ENHANCER_START,
                                    lf.createTypedLiteral(span.getStart())));
                                metadata.add(new TripleImpl(ta, ENHANCER_END,
                                    lf.createTypedLiteral(span.getEnd())));
                                metadata.add(new TripleImpl(ta, ENHANCER_SELECTION_CONTEXT,
                                    new PlainLiteralImpl(context == null ?
                                            getDefaultSelectionContext(at.getSpan(), span.getSpan(), span.getStart()) :
                                                context.getSpan(), lang)));
                                //add the NER type
                                if(nerAnno.value().getType() != null){
                                    metadata.add(new TripleImpl(ta,DC_TYPE,nerAnno.value().getType()));
                                }
                                if(nerAnno.probability() >= 0) {
                                    metadata.add(new TripleImpl(ta, ENHANCER_CONFIDENCE,
                                        lf.createTypedLiteral(nerAnno.probability())));
                                }
                            }
                            break;
                    }
                }

        }
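        // Topic classification results: the confidence score and, when available,
        // the classifier's precision/recall/f1 estimates are attached as typed
        // literals created from Double values.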
        UriRef precision = new UriRef(NamespaceEnum.fise + "classifier/precision");
        UriRef recall = new UriRef(NamespaceEnum.fise + "classifier/recall");
        UriRef f1 = new UriRef(NamespaceEnum.fise + "classifier/f1");

        LiteralFactory lf = LiteralFactory.getInstance();
        ci.getLock().writeLock().lock();
        try {
            // Global text annotation to attach all the topic annotation to it.
            UriRef textAnnotation = EnhancementEngineHelper.createTextEnhancement(ci, this);
            metadata.add(new TripleImpl(textAnnotation,
                    org.apache.stanbol.enhancer.servicesapi.rdf.Properties.DC_TYPE,
                    OntologicalClasses.SKOS_CONCEPT));
            for (TopicSuggestion topic : topics) {
                UriRef enhancement = EnhancementEngineHelper.createEntityEnhancement(ci, this);
                metadata.add(new TripleImpl(enhancement,
                        org.apache.stanbol.enhancer.servicesapi.rdf.Properties.RDF_TYPE,
                        TechnicalClasses.ENHANCER_TOPICANNOTATION));
                metadata.add(new TripleImpl(enhancement,
                        org.apache.stanbol.enhancer.servicesapi.rdf.Properties.DC_RELATION, textAnnotation));

                // add link to entity
                metadata.add(new TripleImpl(enhancement,
                        org.apache.stanbol.enhancer.servicesapi.rdf.Properties.ENHANCER_ENTITY_REFERENCE,
                        new UriRef(topic.conceptUri)));
                metadata.add(new TripleImpl(enhancement,
                        org.apache.stanbol.enhancer.servicesapi.rdf.Properties.ENHANCER_ENTITY_TYPE,
                        OntologicalClasses.SKOS_CONCEPT));

                // add confidence information
                metadata.add(new TripleImpl(enhancement,
                        org.apache.stanbol.enhancer.servicesapi.rdf.Properties.ENHANCER_CONFIDENCE, lf
                                .createTypedLiteral(Double.valueOf(topic.score))));

                // add performance estimates of the classifier if available
                ClassificationReport perf = getPerformanceEstimates(topic.conceptUri);
                if (perf.uptodate) {
                    metadata.add(new TripleImpl(enhancement, precision, lf.createTypedLiteral(Double
                            .valueOf(perf.precision))));
                    metadata.add(new TripleImpl(enhancement, recall, lf.createTypedLiteral(Double
                            .valueOf(perf.recall))));
                    metadata.add(new TripleImpl(enhancement, f1, lf.createTypedLiteral(Double
                            .valueOf(perf.f1))));
                }
                // fetch concept label from the entityhub or a referenced site if available
                Entity entity = entityhub.getEntity(topic.conceptUri);
                if (entity == null) {

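    // Store a java.util.Date as a typed (xsd:dateTime) literal and check that
    // the triple can be found again in the graph.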
    @Test
    public void dateStorage() {
        MGraph graph = getEmptyMGraph();
        Date date = new Date(0);
        LiteralFactory literalFactory = LiteralFactory.getInstance();
        TypedLiteral dateLiteral = literalFactory.createTypedLiteral(date);
        Triple triple = new TripleImpl(new BNode(), new UriRef("http://example.com/property"), dateLiteral);
        graph.add(triple);
        Assert.assertTrue(graph.contains(triple));
    }

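    // Same scenario as above, additionally printing the date literal's lexical form.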
    @Test
    public void dateStorage2() {
        MGraph graph = getEmptyMGraph();
        Date date = new Date(0);
        LiteralFactory literalFactory = LiteralFactory.getInstance();
        TypedLiteral dateLiteral = literalFactory.createTypedLiteral(date);
        System.out.println(dateLiteral);
        UriRef property = new UriRef("http://example.com/property");
        Triple triple = new TripleImpl(new BNode(), property, dateLiteral);
        graph.add(triple);

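    // Build a random test graph: one third of the literal objects are int typed
    // literals, one third double typed literals, the rest plain text literals.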
    private static void createGraph(TripleCollection tc, int triples, Long seed){
        Random rnd = new Random();
        if(seed != null){
             rnd.setSeed(seed);
        }
        LiteralFactory lf = LiteralFactory.getInstance();
        //random values are drawn from the range [0..3)
        double l = 1.0; //literal
        double i = l / 3; //int
        double d = l * 2 / 3;//double
        double b = 2.0;//bNode
        double nb = b - (l * 2 / 3); //create new bNode
        double random;
        NonLiteral subject = null;
        UriRef predicate = null;
        List<UriRef> predicateList = new ArrayList<UriRef>();
        predicateList.add(RDF.first);
        predicateList.add(RDF.rest);
        predicateList.add(RDF.type);
        predicateList.add(RDFS.label);
        predicateList.add(RDFS.comment);
        predicateList.add(RDFS.range);
        predicateList.add(RDFS.domain);
        predicateList.add(FOAF.name);
        predicateList.add(FOAF.nick);
        predicateList.add(FOAF.homepage);
        predicateList.add(FOAF.age);
        predicateList.add(FOAF.depiction);
        String URI_PREFIX = "http://www.test.org/bigGraph/ref";
        Language DE = new Language("de");
        Language EN = new Language("en");
        Iterator<UriRef> predicates = predicateList.iterator();
        List<BNode> bNodes = new ArrayList<BNode>();
        bNodes.add(new BNode());
        for (int count = 0; tc.size() < triples; count++) {
            random = rnd.nextDouble() * 3;
            if (random >= 2.5 || count == 0) {
                if (random <= 2.75) {
                    subject = new UriRef(URI_PREFIX + count);
                } else {
                    int rndIndex = (int) ((random - 2.75) * bNodes.size() / (3.0 - 2.75));
                    subject = bNodes.get(rndIndex);
                }
            }
            if (random > 2.0 || count == 0) {
                if (!predicates.hasNext()) {
                    Collections.shuffle(predicateList,rnd);
                    predicates = predicateList.iterator();
                }
                predicate = predicates.next();
            }
            if (random <= l) { //literal
                if (random <= i) {
                    tc.add(new TripleImpl(subject, predicate, lf.createTypedLiteral(count)));
                } else if (random <= d) {
                    tc.add(new TripleImpl(subject, predicate, lf.createTypedLiteral(random)));
                } else {
                    PlainLiteral text;
                    if (random <= i) {
                        text = new PlainLiteralImpl("Literal for " + count);
                    } else if (random <= d) {
