Package org.apache.clerezza.rdf.core

Examples of org.apache.clerezza.rdf.core.LiteralFactory
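
Before the individual snippets, a minimal self-contained sketch of the round trip most of them build on (assuming the pre-1.0 org.apache.clerezza.rdf.core API; everything outside the quoted snippets on this page is illustrative only):

    import org.apache.clerezza.rdf.core.LiteralFactory;
    import org.apache.clerezza.rdf.core.TypedLiteral;

    public class LiteralFactoryRoundTrip {
        public static void main(String[] args) {
            // singleton factory used throughout the examples on this page
            LiteralFactory lf = LiteralFactory.getInstance();
            // Java value -> typed RDF literal (an Integer is expected to map to xsd:int)
            TypedLiteral literal = lf.createTypedLiteral(42);
            System.out.println(literal.getLexicalForm() + "^^" + literal.getDataType());
            // typed RDF literal -> Java value; throws NoConvertorException for unsupported
            // types and InvalidLiteralTypeException when the datatype does not match
            Integer value = lf.createObject(Integer.class, literal);
            System.out.println(value);
        }
    }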


    private static void createGraph(Collection<Triple> tc, int triples, Long seed){
        Random rnd = new Random();
        if(seed != null){
             rnd.setSeed(seed);
        }
        LiteralFactory lf = LiteralFactory.getInstance();
        //randoms are in the range [0..3]
        double l = 1.0; //literal
        double i = l / 3; //int
        double d = l * 2 / 3;//double
        double b = 2.0;//bNode
        double nb = b - (l * 2 / 3); //create new bNode
        double random;
        NonLiteral subject = null;
        UriRef predicate = null;
        List<UriRef> predicateList = new ArrayList<UriRef>();
        predicateList.add(RDF.first);
        predicateList.add(RDF.rest);
        predicateList.add(RDF.type);
        predicateList.add(RDFS.label);
        predicateList.add(RDFS.comment);
        predicateList.add(RDFS.range);
        predicateList.add(RDFS.domain);
        predicateList.add(FOAF.name);
        predicateList.add(FOAF.nick);
        predicateList.add(FOAF.homepage);
        predicateList.add(FOAF.age);
        predicateList.add(FOAF.depiction);
        String URI_PREFIX = "http://www.test.org/bigGraph/ref";
        Language DE = new Language("de");
        Language EN = new Language("en");
        Iterator<UriRef> predicates = predicateList.iterator();
        List<BNode> bNodes = new ArrayList<BNode>();
        bNodes.add(new BNode());
        for (int count = 0; tc.size() < triples; count++) {
            random = rnd.nextDouble() * 3;
            if (random >= 2.5 || count == 0) {
                if (random <= 2.75) {
                    subject = new UriRef(URI_PREFIX + count);
                } else {
                    int rndIndex = (int) ((random - 2.75) * bNodes.size() / (3.0 - 2.75));
                    subject = bNodes.get(rndIndex);
                }
            }
            if (random > 2.0 || count == 0) {
                if (!predicates.hasNext()) {
                    Collections.shuffle(predicateList,rnd);
                    predicates = predicateList.iterator();
                }
                predicate = predicates.next();
            }
            if (random <= l) { //literal
                if (random <= i) {
                    tc.add(new TripleImpl(subject, predicate, lf.createTypedLiteral(count)));
                } else if (random <= d) {
                    tc.add(new TripleImpl(subject, predicate, lf.createTypedLiteral(random)));
                } else {
                    PlainLiteral text;
                    if (random <= i) {
                        text = new PlainLiteralImpl("Literal for " + count);
                    } else if (random <= d) {
View Full Code Here
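
The int and double branches above rely on the default convertors registered with the singleton factory. A rough illustration of what they are expected to produce (the datatype comments are assumptions about the default convertor set, not something the snippet guarantees):

    LiteralFactory lf = LiteralFactory.getInstance();
    System.out.println(lf.createTypedLiteral(1).getDataType());      // expected: xsd:int
    System.out.println(lf.createTypedLiteral(1L).getDataType());     // expected: xsd:long
    System.out.println(lf.createTypedLiteral(1.0).getDataType());    // expected: xsd:double
    System.out.println(lf.createTypedLiteral("text").getDataType()); // expected: xsd:string
    System.out.println(lf.createTypedLiteral(new java.util.Date()).getDataType()); // expected: xsd:dateTime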


                new Mapping.Converter(){//we need to convert from MByte/min to kByte/sec
                    @Override
                    public Resource convert(Resource value) {
                        if(value instanceof TypedLiteral &&
                                XSD.double_.equals(((TypedLiteral)value).getDataType())){
                            LiteralFactory lf = LiteralFactory.getInstance();
                            double mm = lf.createObject(Double.class, (TypedLiteral)value);
                            return lf.createTypedLiteral(Double.valueOf(
                                mm*1024/60));
                        } else {
                            return value; //do not convert
                        }
                    }
View Full Code Here
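
Note the guard in this converter: createObject(Double.class, ...) is only invoked once the value is known to be a TypedLiteral with the xsd:double datatype, so values the factory cannot or should not convert are passed through unchanged instead of triggering a NoConvertorException or InvalidLiteralTypeException.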

            }
        } else { // null indicates to use the Entityhub to look up Entities
            site = null;
        }
        MGraph graph = ci.getMetadata();
        LiteralFactory literalFactory = LiteralFactory.getInstance();
        // Retrieve the existing text annotations (requires read lock)
        Map<NamedEntity,List<UriRef>> textAnnotations = new HashMap<NamedEntity,List<UriRef>>();
        // the language extracted for the parsed content or NULL if not
        // available
        String contentLanguage;
View Full Code Here

            throw new IllegalStateException("Unable to extract Language for " + "ContentItem " + ci.getUri() + ": This is also checked in the canEnhance " + "method! -> This indicates a bug in the implementation of the " + "EnhancementJobManager!");
        }
        Language lang = new Language(language); //used for the plain literals in TextAnnotations
    try {
      List<NamedEntity> lista = this.client.extractEntities(text, language);
      LiteralFactory literalFactory = LiteralFactory.getInstance();

      MGraph g = ci.getMetadata();

      for (NamedEntity ne : lista) {
        try {
          UriRef textAnnotation = EnhancementEngineHelper.createTextEnhancement(ci, this);
          //add selected text as PlainLiteral in the language extracted from the text
          g.add(new TripleImpl(textAnnotation, ENHANCER_SELECTED_TEXT,
              new PlainLiteralImpl(ne.getFormKind(),lang)));
          g.add(new TripleImpl(textAnnotation, DC_TYPE, getEntityRefForType(ne.type)));
          if (ne.getFrom() != null && ne.getTo() != null) {
            g.add(new TripleImpl(textAnnotation, ENHANCER_START, literalFactory.createTypedLiteral(
                ne.getFrom().intValue())));
            g.add(new TripleImpl(textAnnotation, ENHANCER_END, literalFactory.createTypedLiteral(
                ne.getTo().intValue())));
            g.add(new TripleImpl(textAnnotation, ENHANCER_SELECTION_CONTEXT,
                new PlainLiteralImpl(getSelectionContext(text, ne.getFormKind(), ne.getFrom().intValue()), lang)));
          }
        } catch (NoConvertorException e) {
View Full Code Here
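
Reading such typed values back out of the metadata graph is the inverse operation. A short sketch that reuses the names from the snippet above (g, textAnnotation, literalFactory, ENHANCER_START); the surrounding control flow is illustrative:

    Iterator<Triple> starts = g.filter(textAnnotation, ENHANCER_START, null);
    if (starts.hasNext()) {
        Resource value = starts.next().getObject();
        if (value instanceof TypedLiteral) {
            // NoConvertorException/InvalidLiteralTypeException would signal a non-integer value
            int start = literalFactory.createObject(Integer.class, (TypedLiteral) value);
            System.out.println("start offset: " + start);
        }
    }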

        confidence
    }

    private void initOccurrences() {
        MGraph graph = contentItem.getMetadata();
        LiteralFactory lf = LiteralFactory.getInstance();
        Map<UriRef,Collection<NonLiteral>> suggestionMap = new HashMap<UriRef,Collection<NonLiteral>>();
        // 1) get Entity Annotations
        Map<NonLiteral,Map<EAProps,Object>> entitySuggestionMap = new HashMap<NonLiteral,Map<EAProps,Object>>();
        Iterator<Triple> entityAnnotations = graph.filter(null, RDF.type, ENHANCER_ENTITYANNOTATION);
        while(entityAnnotations.hasNext()){
View Full Code Here
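
Each entity annotation found by that filter usually carries its confidence as an xsd:double typed literal. One way to read it with the same factory, reusing graph, lf and entityAnnotations from the snippet (ENHANCER_CONFIDENCE is the usual Stanbol property for this and an assumption here):

    NonLiteral entityAnnotation = entityAnnotations.next().getSubject();
    Iterator<Triple> confidences = graph.filter(entityAnnotation, ENHANCER_CONFIDENCE, null);
    if (confidences.hasNext()) {
        Resource object = confidences.next().getObject();
        if (object instanceof TypedLiteral) {
            // null checks omitted in this sketch
            Double confidence = lf.createObject(Double.class, (TypedLiteral) object);
        }
    }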

    /**
     * @return an RDF/JSON description of places for the word map widget
     */
    public String getPlacesAsJSON() throws ParseException, UnsupportedEncodingException {
        MGraph g = new IndexedMGraph();
        LiteralFactory lf = LiteralFactory.getInstance();
        MGraph metadata = contentItem.getMetadata();
        for (EntityExtractionSummary p : getPlaceOccurrences()) {
            EntitySuggestion bestGuess = p.getBestGuess();
            if (bestGuess == null) {
                continue;
            }
            UriRef uri = new UriRef(bestGuess.getUri());
            Iterator<Triple> latitudes = metadata.filter(uri, GEO_LAT, null);
            if (latitudes.hasNext()) {
                g.add(latitudes.next());
            }
            Iterator<Triple> longitudes = metadata.filter(uri, GEO_LONG, null);
            if (longitudes.hasNext()) {
                g.add(longitudes.next());
                g.add(new TripleImpl(uri, Properties.RDFS_LABEL, lf.createTypedLiteral(bestGuess.getLabel())));
            }
        }
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        serializer.serialize(out, g, SupportedFormat.RDF_JSON);
       
View Full Code Here
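
The Serializer used above is Clerezza's pluggable serialization service. A standalone sketch of the same output step (Serializer.getInstance(), the SimpleMGraph and the example triple are assumptions for running outside OSGi, where the serializer would otherwise be injected; Properties.RDFS_LABEL is the constant used above):

    MGraph g = new SimpleMGraph();
    LiteralFactory lf = LiteralFactory.getInstance();
    g.add(new TripleImpl(new UriRef("http://www.test.org/place"), Properties.RDFS_LABEL,
            lf.createTypedLiteral("Example label")));
    Serializer serializer = Serializer.getInstance();
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    serializer.serialize(out, g, SupportedFormat.RDF_JSON);
    String json = new String(out.toByteArray(), java.nio.charset.StandardCharsets.UTF_8);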

     *
     * @return the URI of the new enhancement instance
     */
    protected static UriRef createEnhancement(MGraph metadata,
            EnhancementEngine engine, UriRef contentItemId){
        LiteralFactory literalFactory = LiteralFactory.getInstance();

        UriRef enhancement = new UriRef("urn:enhancement-"
                + EnhancementEngineHelper.randomUUID());
        //add the Enhancement Type
        metadata.add(new TripleImpl(enhancement, RDF_TYPE,
                ENHANCER_ENHANCEMENT));
        //link the enhancement to the content item it was extracted from
        metadata.add(new TripleImpl(enhancement,
                ENHANCER_EXTRACTED_FROM, contentItemId));
        // creation date
        metadata.add(new TripleImpl(enhancement, DC_CREATED,
                literalFactory.createTypedLiteral(new Date())));

        // the engines that extracted the data
        // TODO: add some kind of versioning info for the extractor?
        // TODO: use a public dereferencing URI instead? that would allow for
        // explicit versioning too
        /* NOTE (Rupert Westenthaler 2010-05-26):
         * The idea is to use the ComponentContext in the activate() method of
         * an Enhancer to get the bundle name/version and use that as a
         * URI for the creator.
         * We would need to add a getEnhancerID() method to the enhancer interface
         * to access this information.
         */
        metadata.add(new TripleImpl(enhancement, DC_CREATOR,
                literalFactory.createTypedLiteral(engine.getClass().getName())));
        return enhancement;
    }
View Full Code Here
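
Engines do not normally call this protected helper directly; EnhancementEngineHelper.createTextEnhancement(ci, this) and EnhancementEngineHelper.createEntityEnhancement(ci, this), as used in an earlier snippet, are the public wrappers that build on it and additionally type the new resource as a TextAnnotation or EntityAnnotation.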

     * @param enhancement the enhancement
     * @param engine the engine
     */
    public static void addContributingEngine(MGraph metadata, UriRef enhancement,
                                             EnhancementEngine engine){
        LiteralFactory literalFactory = LiteralFactory.getInstance();
        // TODO: use a public dereferencing URI instead?
        metadata.add(new TripleImpl(enhancement, DC_CONTRIBUTOR,
            literalFactory.createTypedLiteral(engine.getClass().getName())));
        //set the modification date to the current date.
        set(metadata,enhancement,DC_MODIFIED,new Date(),literalFactory);
    }
View Full Code Here
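
The set(...) call at the end is another EnhancementEngineHelper utility: it uses the passed LiteralFactory to write the new Date() as an xsd:dateTime typed literal for dc:modified, so the modification date is refreshed whenever an engine contributes to the enhancement.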

     * @see EnhancementEngineHelper#createTextEnhancement(ContentItem, EnhancementEngine)
     */
    @Deprecated
    public static UriRef createNewExtraction(ContentItem ci,
            EnhancementEngine engine) {
        LiteralFactory literalFactory = LiteralFactory.getInstance();

        MGraph metadata = ci.getMetadata();
        UriRef extraction = new UriRef("urn:extraction-"
                + EnhancementEngineHelper.randomUUID());

        metadata.add(new TripleImpl(extraction, RDF_TYPE,
                ENHANCER_EXTRACTION));

        // relate the extraction to the content item
        metadata.add(new TripleImpl(extraction,
                ENHANCER_RELATED_CONTENT_ITEM, new UriRef(ci.getUri().getUnicodeString())));

        // creation date
        metadata.add(new TripleImpl(extraction, DC_CREATED,
                literalFactory.createTypedLiteral(new Date())));

        // the engines that extracted the data
        // TODO: add some kind of versioning info for the extractor?
        // TODO: use a public dereferencing URI instead? that would allow for
        // explicit versioning too
        metadata.add(new TripleImpl(extraction, DC_CREATOR,
                literalFactory.createTypedLiteral(engine.getClass().getName())));

        return extraction;
    }
View Full Code Here

            IRI ontologyIRI = ontologyReference.getOntologyIRI(), versionIri = ontologyReference
                    .getVersionIRI();
            UriRef entry = buildResource(ontologyReference);
            tType = new TripleImpl(entry, RDF.type, ENTRY_URIREF);
            tMaps = new TripleImpl(entry, MAPS_TO_GRAPH_URIREF, graphName);
            LiteralFactory lf = LiteralFactory.getInstance();
            tHasOiri = new TripleImpl(entry, HAS_ONTOLOGY_IRI_URIREF, lf.createTypedLiteral(new UriRef(
                    ontologyIRI.toString())));
            if (versionIri != null) tHasViri = new TripleImpl(entry, HAS_VERSION_IRI_URIREF,
                    lf.createTypedLiteral(new UriRef(versionIri.toString())));
            synchronized (graph) {
                graph.add(tType);
                graph.add(tMaps);
                if (tHasViri != null) graph.add(tHasViri);
                graph.add(tHasOiri);
View Full Code Here
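
A perhaps less obvious convertor is used here: createTypedLiteral also accepts a UriRef, turning the ontology IRI and version IRI into typed literals (presumably xsd:anyURI) rather than plain resource objects. A small sketch of that round trip (the example IRI is made up, and the reverse conversion is assumed to be supported by the same convertor):

    LiteralFactory lf = LiteralFactory.getInstance();
    TypedLiteral iriLiteral = lf.createTypedLiteral(new UriRef("http://www.test.org/ontology"));
    UriRef iri = lf.createObject(UriRef.class, iriLiteral);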
