Package edu.isi.karma.er.helper

Examples of edu.isi.karma.er.helper.TripleStoreUtil


      }
      else {
        url = path.substring(1);
      }
    }
    TripleStoreUtil util = new TripleStoreUtil();
    //response.getWriter().println(TripleStoreUtil.defaultModelsRepoUrl);
    try {
      response.getWriter().println(util.getMappingFromTripleStore(serverAddress, context, url));
    } catch (Exception e) {
      // TODO Auto-generated catch block
      e.printStackTrace();
    }
  }
View Full Code Here


import edu.isi.karma.webserver.KarmaException;

public class LoadRDFToTripleStore {
 
  public static void main(String args[]) {
    TripleStoreUtil util = new TripleStoreUtil();
    Group options = createCommandLineOptions();
        Parser parser = new Parser();
        parser.setGroup(options);
        parser.setGroup(options);
        HelpFormatter hf = new HelpFormatter();
        parser.setHelpFormatter(hf);
        parser.setHelpTrigger("--help");
        CommandLine cl = parser.parseAndHelp(args);
        if (cl == null || cl.getOptions().size() == 0 || cl.hasOption("--help")) {
            hf.setGroup(options);
            hf.print();
            return;
        }
        String filepath = (String) cl.getValue("--filepath");
        String tripleStoreUrl = (String) cl.getValue("--triplestoreurl");
        String context = (String) cl.getValue("--context");
        if (filepath == null || tripleStoreUrl == null || context == null)
          return;
    File file = new File(filepath);
    if (file.isDirectory()) {
      File[] files = file.listFiles();
      for (File f : files) {
        System.out.println(f.getName());
        if (FileUtils.getExtension(f.getName()) != null)
          try {
            util.saveToStoreFromFile(f.getAbsolutePath(), tripleStoreUrl, context, false, null);
          } catch (KarmaException e) {
            System.err.println(e.getMessage());
          }
      }
    }
    else {
      if (FileUtils.getExtension(file.getName()) != null)
        try {
          util.saveToStoreFromFile(file.getAbsolutePath(), tripleStoreUrl, context, false, null);
        } catch (KarmaException e) {
          System.err.println(e.getMessage());
        }
    }
  }
View Full Code Here

  public UpdateContainer doIt(Workspace workspace) throws CommandException {

    Worksheet worksheet = workspace.getWorksheet(worksheetId);
    SuperSelection selection = getSuperSelection(worksheet);
    RepFactory factory = workspace.getFactory();
    TripleStoreUtil util = new TripleStoreUtil();
    HashMap<String, List<String>> result = null;
    nodeUri = nodeUri.trim();
    Map<String, Label> parents = workspace.getOntologyManager().getSuperClasses(nodeUri, true);
    Set<String> classes = new HashSet<String>(parents.keySet());
    classes.add(nodeUri);
    StringBuilder builder = new StringBuilder();
    nodeUri = builder.append("<").append(nodeUri).append(">").toString();
    try {
      result = util.getPredicatesForTriplesMapsWithSameClass(tripleStoreUrl, context, classes);
    } catch (KarmaException e) {
      LOG.error("Unable to find predicates for triples maps with same class as: " + nodeUri, e);
    }
    final JSONArray array = new JSONArray();
    List<JSONObject> objects = new ArrayList<JSONObject>();
    List<String> concatenatedPredicateObjectMapsList = result.get("predicateObjectMaps");
    List<String> predicates = result.get("predicates");
    List<String> otherClasses = result.get("otherClasses");
    Iterator<String> concatenatedPredicateObjectMapsListItr = concatenatedPredicateObjectMapsList.iterator();
    Iterator<String> predicatesItr = predicates.iterator();
    Iterator<String> otherClassesItr = otherClasses.iterator();
    String hNodeId = FetchHNodeIdFromAlignmentCommand.gethNodeId(AlignmentManager.Instance().constructAlignmentId(workspace.getId(), worksheetId), columnUri);
    if (hNodeId == null) {
      return new UpdateContainer(new AbstractUpdate() {

        @Override
        public void generateJson(String prefix, PrintWriter pw, VWorkspace vWorkspace) {
          pw.print(array.toString());
        }
      });
    }
    HNode hnode = factory.getHNode(hNodeId);
    List<Table> dataTables = new ArrayList<Table>();
    CloneTableUtils.getDatatable(worksheet.getDataTable(), factory.getHTable(hnode.getHTableId()), dataTables, selection);
    KR2RMLBloomFilter uris = new KR2RMLBloomFilter(KR2RMLBloomFilter.defaultVectorSize, KR2RMLBloomFilter.defaultnbHash, Hash.JENKINS_HASH);
    Set<String> uriSet = new HashSet<String>();
    for(Table t : dataTables) {
      for(Row r : t.getRows(0, t.getNumRows(), selection)) {
        Node n = r.getNode(hNodeId);
        if(n != null && n.getValue() != null && !n.getValue().isEmptyValue() && n.getValue().asString() != null && !n.getValue().asString().trim().isEmpty() ) {
          String value = n.getValue().asString().trim().replace(" ", "");
          String baseURI = worksheet.getMetadataContainer().getWorksheetProperties().getPropertyValue(Property.baseURI);
          try {
            URI uri = new URI(value);
            if (!uri.isAbsolute() && baseURI != null) {
              value = baseURI + value;
            }
          } catch (URISyntaxException e) {
          }
          builder = new StringBuilder();
          value = builder.append("<").append(value).append(">").toString(); //String builder
          uriSet.add(value);
          uris.add(new Key(value.getBytes(UTF8_CHARSET)));
        }
      }
    }
    Set<String> maps = new HashSet<String>();
    Map<String, String> bloomfilterMapping = new HashMap<String, String>();
    try{
      for (String concatenatedPredicateObjectMaps : concatenatedPredicateObjectMapsList) {
        List<String> predicateObjectMaps = new ArrayList<String>(Arrays.asList(concatenatedPredicateObjectMaps.split(",")));
        maps.addAll(predicateObjectMaps);
        if (maps.size() > limit) {
          bloomfilterMapping.putAll(util.getBloomFiltersForMaps(tripleStoreUrl, null, maps));
          maps = new HashSet<String>();
        }
      }
      if (maps.size() > 0)
        bloomfilterMapping.putAll(util.getBloomFiltersForMaps(tripleStoreUrl, null, maps));
    } catch (KarmaException e1) {
      e1.printStackTrace();
    }
    while(concatenatedPredicateObjectMapsListItr.hasNext() && predicatesItr.hasNext() && otherClassesItr.hasNext())
    {
View Full Code Here

      }
      for (Entry<String, BloomFilterWorker> entry : workers.entrySet()) {
        while(!entry.getValue().isFinished());
        bfs.put(entry.getKey(), entry.getValue().getKR2RMLBloomFilter());
      }
      TripleStoreUtil utilObj = new TripleStoreUtil();
      Set<String> triplemaps = bfs.keySet();
      Map<String, String> bloomfilterMapping = new HashMap<String, String>();
      bloomfilterMapping.putAll(utilObj.getBloomFiltersForMaps(triplestoreURL, context, triplemaps));
      utilObj.updateTripleStoreWithBloomFilters(bfs, bloomfilterMapping, triplestoreURL, context);
      System.out.println("process time: " + (System.currentTimeMillis() - start));
      Map<String, String> verification = new HashMap<String, String>();
      verification.putAll(utilObj.getBloomFiltersForMaps(triplestoreURL, context, triplemaps));
      boolean verify = true;
      for (Entry<String, String> entry : verification.entrySet()) {
        String key = entry.getKey();
        String value = entry.getValue();
        KR2RMLBloomFilter bf2 = new KR2RMLBloomFilter(KR2RMLBloomFilter.defaultVectorSize, KR2RMLBloomFilter.defaultnbHash, Hash.JENKINS_HASH);
        KR2RMLBloomFilter bf = bfs.get(key);
        bf2.populateFromCompressedAndBase64EncodedString(value);
        bf2.and(bf);
        bf2.xor(bf);
        try {
          Field f = BloomFilter.class.getDeclaredField("bits");
          f.setAccessible(true);
          BitSet bits = (BitSet) f.get(bf2);
          if (bits.cardinality() != 0) {
            verify = false;
            break;
          }
        } catch (Exception e) {

        }
      }
      if (!verify) {
        utilObj.updateTripleStoreWithBloomFilters(bfs, verification, triplestoreURL, context);
      }
    }

  }
View Full Code Here

          List<String> rowIds = SubjectURIToRowId.get(uri)
          rowIds.add(r.getId());
        }
      }
    }
    TripleStoreUtil util = new TripleStoreUtil();

    //String modelContext = worksheet.getMetadataContainer().getWorksheetProperties().getPropertyValue(Property.modelContext);
    List<String> subjects = new LinkedList<String>();
    subjects.addAll(rowHashToSubjectURI.values());
    List<String> predicates = new LinkedList<String>();
    List<String> otherClasses = new LinkedList<String>();
    Map<String, List<String>> results = new HashMap<String, List<String>>();

    URIFormatter uriFormatter = new URIFormatter(workspace.getOntologyManager(), new ErrorReport());
    if(sameAsPredicate!= null && !sameAsPredicate.trim().isEmpty())
    {
      sameAsPredicate = uriFormatter.getExpandedAndNormalizedUri(sameAsPredicate);
    }

    JSONArray predicatesarray = new JSONArray(predicate);
    JSONArray otherClassarray = new JSONArray(otherClass);

    for(int i = 0; i < predicatesarray.length(); i++) {
      predicates.add(predicatesarray.getJSONObject(i).getString("predicate"));
      otherClasses.add(otherClassarray.getJSONObject(i).getString("otherClass"));
    }

    while (subjects.size() > 0) {
      ListIterator<String> subjectsIterator = subjects.listIterator();
      LinkedList<String> tempSubjects = new LinkedList<String>();
      while(tempSubjects.size() < limit && subjects.size() > 0)
      {
        tempSubjects.add(subjectsIterator.next());
        subjectsIterator.remove();
      }
      try {
        Map<String, List<String>> temp = null;
        if (!incoming)
          temp = util.getObjectsForSubjectsAndPredicates(dataRepoUrl, null, tempSubjects , predicates, otherClasses, sameAsPredicate);
        else
          temp = util.getSubjectsForPredicatesAndObjects(dataRepoUrl, null, tempSubjects , predicates, otherClasses, sameAsPredicate);
        addMappingToResults(results, temp);
        //        predicates.clear();
        //        otherClasses.clear();
      } catch (KarmaException e) {
        LOG.error("Unable to load data to augment: ", e);
View Full Code Here

  public UpdateContainer doIt(Workspace workspace) throws CommandException {

    Worksheet worksheet = workspace.getWorksheet(worksheetId);
    SuperSelection selection = getSuperSelection(worksheet);
    RepFactory factory = workspace.getFactory();
    TripleStoreUtil util = new TripleStoreUtil();
    HashMap<String, List<String>> result = null;
    nodeUri = nodeUri.trim();
    Map<String, Label> parents = workspace.getOntologyManager().getSuperClasses(nodeUri, true);
    Set<String> classes = new HashSet<String>(parents.keySet());
    classes.add(nodeUri);
    StringBuilder builder = new StringBuilder();
    nodeUri = builder.append("<").append(nodeUri).append(">").toString();
    try {
      result = util.getPredicatesForParentTriplesMapsWithSameClass(tripleStoreUrl, context, classes);
    } catch (KarmaException e) {
      LOG.error("Unable to find predicates for triples maps with same class as: " + nodeUri, e);
    }
    final JSONArray array = new JSONArray();
    List<JSONObject> objects = new ArrayList<JSONObject>();
    List<String> concatenatedPredicateObjectMapsList = result.get("refObjectMaps");
    List<String> predicates = result.get("predicates");
    List<String> otherClasses = result.get("otherClasses");
    Iterator<String> concatenatedPredicateObjectMapsListItr = concatenatedPredicateObjectMapsList.iterator();
    Iterator<String> predicatesItr = predicates.iterator();
    Iterator<String> otherClassesItr = otherClasses.iterator();
    String hNodeId = FetchHNodeIdFromAlignmentCommand.gethNodeId(AlignmentManager.Instance().constructAlignmentId(workspace.getId(), worksheetId), columnUri);
    HNode hnode = factory.getHNode(hNodeId);
    List<Table> dataTables = new ArrayList<Table>();
    CloneTableUtils.getDatatable(worksheet.getDataTable(), factory.getHTable(hnode.getHTableId()), dataTables, selection);
    KR2RMLBloomFilter uris = new KR2RMLBloomFilter(KR2RMLBloomFilter.defaultVectorSize, KR2RMLBloomFilter.defaultnbHash, Hash.JENKINS_HASH);
    Set<String> uriSet = new HashSet<String>();
    for(Table t : dataTables) {
      for(Row r : t.getRows(0, t.getNumRows(), selection)) {
        Node n = r.getNode(hNodeId);
        if(n != null && n.getValue() != null && !n.getValue().isEmptyValue() && n.getValue().asString() != null && !n.getValue().asString().trim().isEmpty() ) {
          String value = n.getValue().asString().trim().replace(" ", "");;
          builder = new StringBuilder();
          String baseURI = worksheet.getMetadataContainer().getWorksheetProperties().getPropertyValue(Property.baseURI);
          try {
            URI uri = new URI(value);
            if (!uri.isAbsolute() && baseURI != null) {
              value = baseURI + value;
            }
          } catch (URISyntaxException e) {
          }
          value = builder.append("<").append(value).append(">").toString(); //String builder
          uriSet.add(value);
          uris.add(new Key(value.getBytes(UTF8_CHARSET)));
        }
      }
    }
    Set<String> maps = new HashSet<String>();
    Map<String, String> bloomfilterMapping = new HashMap<String, String>();
    try{
      for (String concatenatedPredicateObjectMaps : concatenatedPredicateObjectMapsList) {
        List<String> predicateObjectMaps = new ArrayList<String>(Arrays.asList(concatenatedPredicateObjectMaps.split(",")));
        maps.addAll(predicateObjectMaps);
        if (maps.size() > limit) {
          bloomfilterMapping.putAll(util.getBloomFiltersForMaps(tripleStoreUrl, null, maps));
          maps = new HashSet<String>();
        }
      }
      if (maps.size() > 0)
        bloomfilterMapping.putAll(util.getBloomFiltersForMaps(tripleStoreUrl, null, maps));
    } catch (KarmaException e1) {
      e1.printStackTrace();
    }
    while(concatenatedPredicateObjectMapsListItr.hasNext() && predicatesItr.hasNext() && otherClassesItr.hasNext())
    {
View Full Code Here

  }
 
  @Override
  public UpdateContainer doIt(Workspace workspace) throws CommandException {

    TripleStoreUtil utilObj = new TripleStoreUtil();
    try
    {
      HashMap<String, List<String>> list = utilObj.fetchModelNames(this.tripleStoreUrl);
      return new UpdateContainer(new FetchR2RMLUpdate(list.get("model_names"), list.get("model_urls")));
    }
    catch (Exception e)
    {
      return new UpdateContainer(new ErrorUpdate("Unable to fetch R2RML models: " + e.getMessage()));
View Full Code Here

    // Generate the KR2RML data structures for the RDF generation
    final ErrorReport errorReport = new ErrorReport();
    KR2RMLMappingGenerator mappingGen = null;
    String url = worksheet.getMetadataContainer().getWorksheetProperties().getPropertyValue(Property.modelUrl);
    String modelContext = worksheet.getMetadataContainer().getWorksheetProperties().getPropertyValue(Property.modelContext);
    TripleStoreUtil utilObj = new TripleStoreUtil();
    String modelRepoUrl = worksheet.getMetadataContainer().getWorksheetProperties().getPropertyValue(Property.modelRepository);
    modelRepoUrl = modelRepoUrl == null || modelRepoUrl.isEmpty()? TripleStoreUtil.defaultModelsRepoUrl : modelRepoUrl;
    Map<String, String> bloomfilterMapping = new HashMap<String, String>();
    boolean result = true;
    try{
      mappingGen = new KR2RMLMappingGenerator(workspace, worksheet,

          alignment, worksheet.getSemanticTypes(), rdfSourcePrefix, rdfSourceNamespace,
          Boolean.valueOf(addInverseProperties), errorReport);
    }
    catch (KarmaException e)
    {
      logger.error("Error occured while generating RDF!", e);
      return new UpdateContainer(new ErrorUpdate("Error occured while generating RDF: " + e.getMessage()));
    }

    KR2RMLMapping mapping = mappingGen.getKR2RMLMapping();
    if (url != null && !url.trim().isEmpty() && modelContext != null && !modelContext.trim().isEmpty() && generateBloomFilters && utilObj.testURIExists(modelRepoUrl, "", url)) {
      try {
        File tmp = new File("tmp");
        PrintWriter pw = new PrintWriter(tmp);
        pw.println(utilObj.getMappingFromTripleStore(modelRepoUrl, modelContext, url));
        pw.close();
        Model model = WorksheetR2RMLJenaModelParser.loadSourceModelIntoJenaModel(tmp.toURI().toURL());
        tmp.delete();
        R2RMLMappingIdentifier identifier = new R2RMLMappingIdentifier(mapping.getId().getName(), new URL(url));
        WorksheetR2RMLJenaModelParser parser = new WorksheetR2RMLJenaModelParser(model, identifier);
        mapping = parser.parse();
      } catch (Exception e) {
        e.printStackTrace();
      }
    }
    logger.debug(mapping.toString());

    StringWriter sw = new StringWriter();
    // Generate the RDF using KR2RML data structures
    long start = 0;
    try {
      List<KR2RMLRDFWriter> writers = new ArrayList<KR2RMLRDFWriter>();
      File f = new File(rdfFileLocalPath);
      File parentDir = f.getParentFile();
      parentDir.mkdirs();
      BufferedWriter bw = new BufferedWriter(
          new OutputStreamWriter(new FileOutputStream(f),"UTF-8"));
      N3KR2RMLRDFWriter writer = new N3KR2RMLRDFWriter(new URIFormatter(workspace.getOntologyManager(), errorReport), new PrintWriter (bw));
      writer.setBaseURI(rdfSourceNamespace);
      writers.add(writer);
      if (generateBloomFilters && utilObj.testURIExists(modelRepoUrl, "", url)) {
        BloomFilterKR2RMLRDFWriter bfWriter = new BloomFilterKR2RMLRDFWriter(new PrintWriter(sw), false, this.rdfSourceNamespace);
        writers.add(bfWriter);
        bfWriter.setR2RMLMappingIdentifier(mapping.getId());
      }
      KR2RMLWorksheetRDFGenerator rdfGen = new KR2RMLWorksheetRDFGenerator(worksheet,
          workspace.getFactory(), workspace.getOntologyManager(),
          writers, false, mapping, errorReport, selection);

      rdfGen.generateRDF(true);
      logger.info("RDF written to file: " + rdfFileLocalPath);
      if(saveToStore){
        //take the contents of the RDF file and save them to the store
        logger.info("Using Jena DB:" + hostName + "/"+dbName + " user="+userName);
        saveToStore(rdfFileLocalPath);
      }
      start = System.currentTimeMillis();
      if (generateBloomFilters && utilObj.testURIExists(modelRepoUrl, "", url)) {
        JSONObject obj = new JSONObject(sw.toString());
        result &= updateTripleStore(obj, bloomfilterMapping, modelRepoUrl, modelContext, utilObj);
        Map<String, String> verification = new HashMap<String, String>();
        Set<String> triplemaps = new HashSet<String>(Arrays.asList(obj.getString("ids").split(",")));
        verification.putAll(utilObj.getBloomFiltersForMaps(modelRepoUrl, modelContext, triplemaps));
        boolean verify = true;
        for (Entry<String, String> entry : verification.entrySet()) {
          String key = entry.getKey();
          String value = entry.getValue();
          KR2RMLBloomFilter bf2 = new KR2RMLBloomFilter(KR2RMLBloomFilter.defaultVectorSize, KR2RMLBloomFilter.defaultnbHash, Hash.JENKINS_HASH);
          KR2RMLBloomFilter bf = new KR2RMLBloomFilter(KR2RMLBloomFilter.defaultVectorSize, KR2RMLBloomFilter.defaultnbHash, Hash.JENKINS_HASH);
          bf2.populateFromCompressedAndBase64EncodedString(value);
          bf.populateFromCompressedAndBase64EncodedString(obj.getString(key));
          bf2.and(bf);
          bf2.xor(bf);
          try {
            Field f1 = BloomFilter.class.getDeclaredField("bits");
            f1.setAccessible(true);
            BitSet bits = (BitSet) f1.get(bf2);
            if (bits.cardinality() != 0) {
              verify = false;
              break;
            }
          } catch (Exception e) {
           
          }
        }
        if (!verify) {
          result &= updateTripleStore(obj, verification, modelRepoUrl, modelContext, utilObj);
        }
        long end = System.currentTimeMillis();
        System.out.println("execution time: " + (end - start) + " node total: " + bloomfilterMapping.size());
      }
    } catch (Exception e1) {
      logger.error("Error occured while generating RDF!", e1);
      return new UpdateContainer(new ErrorUpdate("Error occured while generating RDF: " + e1.getMessage()));
    }
    try {

      // Get the graph name from properties if empty graph uri
      //      String graphName = worksheet.getMetadataContainer().getWorksheetProperties()
      //          .getPropertyValue(Property.graphName);
      //      if (this.graphUri == null || this.graphUri.isEmpty()) {
      //        // Set to default
      //        worksheet.getMetadataContainer().getWorksheetProperties().setPropertyValue(
      //            Property.graphName, WorksheetProperties.createDefaultGraphName(worksheet.getTitle()));
      //        this.graphUri = WorksheetProperties.createDefaultGraphName(worksheet.getTitle());
      //      }

      if (tripleStoreUrl == null || tripleStoreUrl.isEmpty()) {
        tripleStoreUrl = TripleStoreUtil.defaultDataRepoUrl;
      }
      logger.info("tripleStoreURl : " + tripleStoreUrl);


      result &= utilObj.saveToStoreFromFile(rdfFileLocalPath, tripleStoreUrl, this.graphUri, this.replaceContext, this.rdfSourceNamespace);
      if (url != null && !url.isEmpty() && url.compareTo("") != 0 && utilObj.testURIExists(modelRepoUrl, "", url)) {
        StringBuilder sb = new StringBuilder();
        url = url.trim();
        if(!url.startsWith("<"))
        {
          sb.append("<");
        }
        sb.append(url);
        if(!url.endsWith(">"))
        {
          sb.append(">");
        }
        sb.append(" <");
        sb.append( Uris.MODEL_HAS_DATA_URI);
        sb.append("> \"true\" .\n");
        String input = sb.toString();
        result &= utilObj.saveToStoreFromString(input, modelRepoUrl, modelContext, new Boolean(false), this.rdfSourceNamespace);
      }
      if(result) {
        logger.info("Saved rdf to store");
      } else {
        logger.error("Falied to store rdf to karma_data store");
View Full Code Here

          }
        }
      });
      return uc;
    }
    TripleStoreUtil util = new TripleStoreUtil();
    try {
      util.deleteMappingFromTripleStore(tripleStoreURL, context, mappingURI);
      SaveR2RMLModelCommandFactory scf = new SaveR2RMLModelCommandFactory();
      SaveR2RMLModelCommand command = scf.createCommand(workspace, mappingURI, tripleStoreURL, context, "URL");
      command.doIt(workspace);
    } catch (KarmaException e) {
      return new UpdateContainer(new ErrorUpdate("Error occured while deleting R2RML model!"));
View Full Code Here

            return;
        }
        filepath = (String) cl.getValue("--filepath");
        triplestoreURL = (String) cl.getValue("--triplestoreurl");
        predicate = (String) cl.getValue("--predicate");
        TripleStoreUtil utilObj = new TripleStoreUtil();
        Set<String> predicates = new HashSet<String>();
        predicates.add(predicate);
        List<String> predicateObjectMaps = new ArrayList<String>();
        for (String t : utilObj.getPredicatesForParentTriplesMapsWithSameClass(triplestoreURL, null, predicates).get("refObjectMaps")) {
          predicateObjectMaps.addAll(Arrays.asList(t.split(",")));
        }
        for (String t : utilObj.getPredicatesForTriplesMapsWithSameClass(triplestoreURL, null, predicates).get("predicateObjectMaps")) {
          predicateObjectMaps.addAll(Arrays.asList(t.split(",")));
        }
        Map<String, String> serializedmapping = utilObj.getBloomFiltersForMaps(triplestoreURL, null, predicateObjectMaps);
        Map<String, KR2RMLBloomFilter> mapping = new HashMap<String, KR2RMLBloomFilter>();
       
        for (Entry<String, String> entry : serializedmapping.entrySet()) {
          String key = entry.getKey();
          String value = entry.getValue();
          KR2RMLBloomFilter bf = new KR2RMLBloomFilter(KR2RMLBloomFilter.defaultVectorSize, KR2RMLBloomFilter.defaultnbHash, Hash.JENKINS_HASH);
          bf.populateFromCompressedAndBase64EncodedString(value);
          mapping.put(key, bf);
        }
       
        Model model = ModelFactory.createDefaultModel();
        InputStream s = new FileInputStream(new File(filepath));
    model.read(s, null, "TURTLE");
    StmtIterator iterator = model.listStatements();
    while(iterator.hasNext()) {
      Statement st = iterator.next();
      String subject = "<" + st.getSubject().toString() + ">";
      String object = "<" + st.getObject().toString() + ">";
      for (Entry<String, KR2RMLBloomFilter> entry : mapping.entrySet()) {
        KR2RMLBloomFilter bf = entry.getValue();
        if (bf.membershipTest(new Key(subject.getBytes("UTF-8"))))
          bf.add(new Key(object.getBytes("UTF-8")));
        if (bf.membershipTest(new Key(object.getBytes("UTF-8"))))
          bf.add(new Key(subject.getBytes("UTF-8")));
      }
    }
   
    utilObj.updateTripleStoreWithBloomFilters(mapping, serializedmapping, triplestoreURL, null);
   
  }
View Full Code Here

TOP

Related Classes of edu.isi.karma.er.helper.TripleStoreUtil

Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.